chore: sync local changes
parent 48cb560bfd
commit eaf41045c5
apps/gitea-webhook-ambassador-python/.gitignore (vendored): 63 deletions
@@ -1,63 +0,0 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# Virtual Environment
venv/
env/
ENV/
env.bak/
venv.bak/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~

# Logs
*.log
logs/*.log

# Database
*.db
*.sqlite
*.sqlite3

# PID files
*.pid

# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

# OS
.DS_Store
Thumbs.db

# Temporary files
*.tmp
*.temp

@@ -1,143 +0,0 @@
.PHONY: build clean test lint docker-build docker-push run start stop restart status logs follow help install init

# Variables
APP_NAME := gitea-webhook-ambassador
VERSION := $(shell git describe --tags --always --dirty 2>/dev/null || echo "dev")
PYTHON := python3
PIP := pip
VENV := venv
CONFIG_FILE := config.yaml

# Python commands
PYTHON_FILES := $(shell find . -name "*.py" -type f)
REQUIREMENTS := requirements.txt

# Default target
.DEFAULT_GOAL := help

# Install dependencies
install:
	@echo "Installing dependencies..."
	$(PYTHON) -m venv $(VENV)
	. $(VENV)/bin/activate && $(PIP) install -r $(REQUIREMENTS)

# Initialize database
init:
	@echo "Initializing database..."
	. $(VENV)/bin/activate && $(PYTHON) -c "from app.models.database import create_tables; create_tables(); print('Database initialized')"

# Build (for Python, this means installing dependencies)
build: install
	@echo "Python application ready"

# Clean build artifacts
clean:
	@echo "Cleaning up..."
	@rm -rf $(VENV)
	@rm -f *.db
	@rm -rf logs/
	@rm -f *.pid

# Run tests
test:
	@echo "Running tests..."
	. $(VENV)/bin/activate && $(PYTHON) test_enhanced_features.py

# Run linter
lint:
	@echo "Running linter..."
	. $(VENV)/bin/activate && flake8 app/ --max-line-length=120 --ignore=E501,W503

# Build Docker image
docker-build:
	@echo "Building Docker image $(APP_NAME):$(VERSION)..."
	docker build -t $(APP_NAME):$(VERSION) .
	docker tag $(APP_NAME):$(VERSION) $(APP_NAME):latest

# Push Docker image to registry
docker-push: docker-build
	@echo "Pushing Docker image $(APP_NAME):$(VERSION)..."
	docker push $(APP_NAME):$(VERSION)
	docker push $(APP_NAME):latest

# Run locally
run: build init
	@echo "Starting $(APP_NAME)..."
	. $(VENV)/bin/activate && $(PYTHON) -m uvicorn app.main_enhanced:app --host 0.0.0.0 --port 8000

# Run in background
# Note: the PID capture must run in the same shell as the backgrounded command,
# so it is chained on a single recipe line (each recipe line gets its own shell).
start: build init
	@echo "Starting $(APP_NAME) in background..."
	@mkdir -p logs
	. $(VENV)/bin/activate && nohup $(PYTHON) -m uvicorn app.main_enhanced:app --host 0.0.0.0 --port 8000 > logs/service.log 2>&1 & echo $$! > service.pid
	@echo "Service started with PID $$(cat service.pid)"

# Stop service
stop:
	@if [ -f service.pid ]; then \
		echo "Stopping $(APP_NAME)..."; \
		kill $$(cat service.pid) 2>/dev/null || true; \
		rm -f service.pid; \
		echo "Service stopped"; \
	else \
		echo "No service.pid found"; \
	fi

# Restart service
restart: stop start

# Show service status
status:
	@if [ -f service.pid ]; then \
		PID=$$(cat service.pid); \
		if ps -p $$PID > /dev/null 2>&1; then \
			echo "✅ $(APP_NAME) is running (PID: $$PID)"; \
			echo "📝 Log file: logs/service.log"; \
			echo "🌐 Access: http://localhost:8000"; \
		else \
			echo "❌ $(APP_NAME) is not running (PID file exists but process not found)"; \
			rm -f service.pid; \
		fi; \
	else \
		echo "❌ $(APP_NAME) is not running"; \
	fi

# Show logs
logs:
	@if [ -f logs/service.log ]; then \
		echo "📝 Latest logs (last 50 lines):"; \
		echo "----------------------------------------"; \
		tail -n 50 logs/service.log; \
		echo "----------------------------------------"; \
	else \
		echo "❌ No log file found"; \
	fi

# Follow logs
follow:
	@if [ -f logs/service.log ]; then \
		echo "📝 Following logs (Ctrl+C to exit):"; \
		tail -f logs/service.log; \
	else \
		echo "❌ No log file found"; \
	fi

# Show help
help:
	@echo "Gitea Webhook Ambassador (Python) - Makefile commands:"
	@echo "  install      - Install Python dependencies"
	@echo "  init         - Initialize database"
	@echo "  build        - Install dependencies (alias for install)"
	@echo "  clean        - Remove build artifacts and logs"
	@echo "  test         - Run tests"
	@echo "  lint         - Run linter"
	@echo "  docker-build - Build Docker image"
	@echo "  docker-push  - Build and push Docker image to registry"
	@echo "  run          - Install, init and run locally (foreground)"
	@echo "  start        - Install, init and start in background"
	@echo "  stop         - Stop background service"
	@echo "  restart      - Restart background service"
	@echo "  status       - Show service status"
	@echo "  logs         - Show latest logs"
	@echo "  follow       - Follow logs in real-time"
	@echo "  help         - Show this help message"

@@ -1,189 +0,0 @@
# Gitea Webhook Ambassador (Python)

A high-performance Python webhook service that connects Gitea and Jenkins, supporting intelligent dispatch, high-concurrency processing, and deduplication strategies.

## 🚀 Features

### 1. Intelligent Dispatch Strategy
- **dev branches** → trigger alpha-environment builds
- **prod branches** → trigger production-environment builds
- **other branches** → configurable default strategy

### 2. High-Concurrency Processing
- **Asynchronous task queue**: Celery + Redis for high concurrency
- **Task queueing**: prevents build loss and ensures tasks execute in order
- **Load balancing**: supports multiple worker instances

### 3. Deduplication Strategy
- **Commit hash + branch deduplication**: prevents repeated triggers
- **Time-window deduplication**: the same commit triggers at most once within the configured window (see the sketch below)
- **Intelligent deduplication**: supports configurable deduplication strategies
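
A minimal sketch of the time-window deduplication described above, assuming a Redis client and a key-naming scheme of our choosing (neither is taken from this repository):

```python
import redis

r = redis.Redis.from_url("redis://localhost:6379/0")

def should_trigger(commit_sha: str, branch: str, window_seconds: int = 300) -> bool:
    """Return True only the first time a (commit, branch) pair is seen in the window."""
    key = f"dedup:{commit_sha}:{branch}"
    # SET with nx=True succeeds only if the key does not exist yet;
    # ex=window_seconds expires it when the deduplication window closes.
    return bool(r.set(key, "1", nx=True, ex=window_seconds))
```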

## 🏗️ Architecture Design

```
Gitea Webhook → FastAPI → Celery Queue → Jenkins Workers
      ↓             ↓            ↓              ↓
  Signature      Routing       Task        Concurrent
 Verification    Dispatch    Queueing      Execution
      ↓             ↓            ↓              ↓
 Deduplication  Environment  Persistent      Status
                 Judgment     Storage       Feedback
```

## 📁 Project Structure

```
gitea-webhook-ambassador-python/
├── app/
│   ├── __init__.py
│   ├── main.py                 # FastAPI application entry
│   ├── config.py               # Configuration management
│   ├── models/                 # Data models
│   │   ├── __init__.py
│   │   ├── gitea.py            # Gitea webhook model
│   │   └── jenkins.py          # Jenkins job model
│   ├── services/               # Business logic
│   │   ├── __init__.py
│   │   ├── webhook_service.py  # Webhook processing service
│   │   ├── jenkins_service.py  # Jenkins integration service
│   │   ├── queue_service.py    # Queue management service
│   │   └── dedup_service.py    # Deduplication service
│   ├── api/                    # API routes
│   │   ├── __init__.py
│   │   ├── webhook.py          # Webhook endpoint
│   │   ├── health.py           # Health check
│   │   └── admin.py            # Admin interface
│   ├── core/                   # Core components
│   │   ├── __init__.py
│   │   ├── security.py         # Security validation
│   │   ├── database.py         # Database connection
│   │   └── cache.py            # Cache management
│   └── tasks/                  # Celery tasks
│       ├── __init__.py
│       └── jenkins_tasks.py    # Jenkins task processing
├── tests/                      # Test files
├── docker/                     # Docker configuration
├── requirements.txt            # Python dependencies
├── docker-compose.yml          # Development environment
└── README.md
```

## 🛠️ Tech Stack

- **Web Framework**: FastAPI
- **Task Queue**: Celery + Redis
- **Database**: PostgreSQL (production) / SQLite (development)
- **Cache**: Redis
- **Monitoring**: Prometheus + Grafana
- **Logging**: Structured logging with JSON
- **Testing**: pytest + pytest-asyncio

## 🚀 Quick Start

### 1. Install Dependencies
```bash
pip install -r requirements.txt
```

### 2. Configure Environment
```bash
cp .env.example .env
# Edit the .env file to configure Jenkins and database connections
```

### 3. Start Service
```bash
# Start Redis
docker run -d -p 6379:6379 redis:alpine

# Start Celery worker
celery -A app.tasks worker --loglevel=info

# Start FastAPI application
uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
```

## 📋 Configuration

### Environment Dispatch Configuration
```yaml
environments:
  dev:
    branches: ["dev", "develop", "development"]
    jenkins_job: "alpha-build"
    jenkins_url: "https://jenkins-alpha.example.com"
  prod:
    branches: ["prod", "production", "main", "master"]
    jenkins_job: "production-build"
    jenkins_url: "https://jenkins-prod.example.com"
  default:
    jenkins_job: "default-build"
    jenkins_url: "https://jenkins-default.example.com"
```
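
How a branch resolves against this map mirrors the exact-match-or-`"*"` logic of `Settings.get_environment_for_branch` in `app/config.py`; a small standalone illustration (the dict literal here is ours):

```python
from typing import Optional

def resolve_environment(branch: str, environments: dict) -> Optional[str]:
    # First environment whose branch list contains the branch, or a "*" wildcard.
    for name, env in environments.items():
        if branch in env["branches"] or "*" in env["branches"]:
            return name
    return None

environments = {
    "dev": {"branches": ["dev", "develop", "development"]},
    "prod": {"branches": ["prod", "production", "main", "master"]},
    "default": {"branches": ["*"]},
}
print(resolve_environment("develop", environments))        # dev
print(resolve_environment("feature/login", environments))  # default (wildcard)
```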

### Deduplication Configuration
```yaml
deduplication:
  enabled: true
  window_seconds: 300        # 5-minute deduplication window
  strategy: "commit_branch"  # commit_hash + branch
  cache_ttl: 3600            # Cache for 1 hour
```

### Queue Configuration
```yaml
queue:
  max_concurrent: 10
  max_retries: 3
  retry_delay: 60  # seconds
  priority_levels: 3
```
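
A sketch of how these queue settings might map onto Celery; the app name, broker URL, and task body are illustrative assumptions, not code from this repository:

```python
from celery import Celery

celery_app = Celery("ambassador", broker="redis://localhost:6379/0")
celery_app.conf.worker_concurrency = 10  # queue.max_concurrent

@celery_app.task(bind=True, max_retries=3, default_retry_delay=60)  # max_retries / retry_delay
def trigger_jenkins_build(self, job_name: str, params: dict):
    try:
        ...  # call the Jenkins API here
    except Exception as exc:
        # Re-enqueue with the configured delay until max_retries is exhausted.
        raise self.retry(exc=exc)
```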

## 🔧 API Endpoints

### Webhook Endpoint
```
POST /webhook/gitea
```
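
One way to exercise the endpoint by hand: Gitea signs the raw request body with HMAC-SHA256 and sends the hex digest in the `X-Gitea-Signature` header. The payload below is a trimmed illustration, not a complete Gitea push event:

```python
import hashlib
import hmac
import json

import requests

secret = b"webhook-secret"  # must match the configured webhook secret
payload = json.dumps({
    "ref": "refs/heads/dev",
    "after": "0123456789abcdef0123456789abcdef01234567",
    "repository": {"full_name": "freeleaps/test-project"},
}).encode()

signature = hmac.new(secret, payload, hashlib.sha256).hexdigest()
resp = requests.post(
    "http://localhost:8000/webhook/gitea",
    data=payload,
    headers={"Content-Type": "application/json", "X-Gitea-Signature": signature},
)
print(resp.status_code, resp.text)
```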

### Health Check
```
GET /health
GET /health/queue
GET /health/jenkins
```

### Admin Endpoints
```
GET /admin/queue/status
GET /admin/queue/stats
POST /admin/queue/clear
```

## 🧪 Testing

```bash
# Run all tests
pytest

# Run specific test
pytest tests/test_webhook_service.py

# Run performance test
pytest tests/test_performance.py
```

## 📊 Monitoring Metrics

- Webhook receive rate
- Task queue length
- Jenkins build success rate
- Response time distribution
- Deduplication hit rate

## 🔒 Security Features

- Webhook signature verification
- API key authentication
- Request rate limiting
- Input validation and sanitization
- Secure logging

@@ -1,339 +0,0 @@
# Gitea Webhook Ambassador (Python Enhanced Version)

This is the Gitea Webhook Ambassador service rewritten in Python. It provides the same features as the Go version, plus a web interface and additional management features.

## 🚀 Quick Start

### Method 1: Use the devbox script (recommended, same as the Go version)

```bash
# Install dependencies
./devbox install

# Initialize database
./devbox init

# Start the service
./devbox start

# Check status
./devbox status

# View logs
./devbox logs

# Stop the service
./devbox stop
```

### Method 2: Use the Makefile

```bash
# Install dependencies
make install

# Initialize database
make init

# Start the service (foreground)
make run

# Start the service (background)
make start

# Check status
make status

# View logs
make logs

# Stop the service
make stop
```

### Method 3: Use Python directly

```bash
# Create virtual environment
python3 -m venv venv
source venv/bin/activate

# Install dependencies
pip install -r requirements.txt

# Initialize database
python -c "from app.models.database import create_tables; create_tables()"

# Start the service
python -m uvicorn app.main_enhanced:app --host 0.0.0.0 --port 8000
```

## 📁 Directory Structure (same as the Go version)

```
gitea-webhook-ambassador-python/
├── app/                  # Application code
│   ├── auth/             # Authentication module
│   ├── handlers/         # API handlers
│   ├── models/           # Data models
│   ├── templates/        # HTML templates
│   ├── static/           # Static files
│   └── main_enhanced.py  # Main application entry
├── cmd/                  # CLI tools (same as Go version)
│   └── server/           # Server startup
├── configs/              # Configuration files (same as Go version)
│   └── config.yaml       # Main configuration file
├── data/                 # Data directory (same as Go version)
│   └── *.db              # SQLite database files
├── logs/                 # Log directory (same as Go version)
│   └── service.log       # Service log
├── devbox                # Startup script (same as Go version)
├── Makefile              # Build script (same as Go version)
├── requirements.txt      # Python dependencies
└── README_ENHANCED.md    # This document
```

## 🔧 Configuration

Edit the `configs/config.yaml` file:

```yaml
server:
  port: 8000
  webhookPath: "/webhook"
  secretHeader: "X-Gitea-Signature"
  secretKey: "admin-secret-key-change-in-production"

jenkins:
  url: "http://jenkins.example.com"
  username: "jenkins-user"
  token: "jenkins-api-token"
  timeout: 30

admin:
  token: "admin-api-token"

database:
  path: "data/gitea-webhook-ambassador.db"

logging:
  level: "info"
  format: "text"
  file: "logs/service.log"

worker:
  poolSize: 10
  queueSize: 100
  maxRetries: 3
  retryBackoff: 1

eventCleanup:
  interval: 3600
  expireAfter: 7200
```
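
A quick way to sanity-check this file before starting the service, using plain PyYAML (nothing service-specific assumed):

```python
import yaml

with open("configs/config.yaml", "r", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

print(cfg["server"]["port"])      # 8000
print(cfg["jenkins"]["url"])      # http://jenkins.example.com
print(cfg["worker"]["poolSize"])  # 10
```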

## 🌐 Web Interface

After starting the service, visit the following addresses:

- **Login page**: http://localhost:8000
- **Dashboard**: http://localhost:8000/dashboard
- **API Docs**: http://localhost:8000/docs

### Default login credentials
- **Username**: admin
- **Password**: admin-secret-key-change-in-production

## 📊 Features

### ✅ Same features as the Go version
- Gitea webhook receiving and processing
- Jenkins job triggering
- Project mapping configuration
- Branch pattern matching
- Retry mechanism
- Logging

### 🆕 Python version enhancements
- **Web login interface**: modern UI based on Bootstrap 5
- **Database storage**: SQLite database for API keys and configuration
- **JWT authentication**: JWT tokens valid for 7 days
- **Frontend dashboard**: multi-tab management interface
- **Auto redirect**: unauthenticated users are redirected to login
- **Health check**: service status monitoring
- **Statistics**: request statistics and performance metrics

## 🔌 API Endpoints

### Authentication
- `POST /api/auth/login` - User login
- `GET /api/auth/verify` - Verify JWT token
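
A minimal login round-trip against these endpoints, based on the `LoginRequest`/`LoginResponse` models in the auth handler; the URL and secret key are the development defaults shown in this document:

```python
import requests

base = "http://localhost:8000"
resp = requests.post(
    f"{base}/api/auth/login",
    json={"secret_key": "admin-secret-key-change-in-production"},
)
resp.raise_for_status()
token = resp.json()["token"]

# Present the JWT as a Bearer token on protected endpoints.
verify = requests.get(
    f"{base}/api/auth/verify",
    headers={"Authorization": f"Bearer {token}"},
)
print(verify.status_code)
```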

### Project Management
- `GET /api/projects` - Get project list
- `POST /api/projects` - Create new project
- `PUT /api/projects/{id}` - Update project
- `DELETE /api/projects/{id}` - Delete project

### API Key Management
- `GET /api/keys` - Get API key list
- `POST /api/keys` - Create new API key
- `DELETE /api/keys/{id}` - Delete API key

### System Monitoring
- `GET /api/health` - Health check
- `GET /api/stats` - Statistics
- `GET /api/logs` - View logs

### Webhook Handling
- `POST /webhook` - Gitea webhook endpoint

## 🛠️ Development

### Run tests
```bash
# Use devbox
./devbox test

# Use Makefile
make test

# Run directly
python test_enhanced_features.py
```

### Code linting
```bash
# Use Makefile
make lint

# Run directly
flake8 app/ --max-line-length=120 --ignore=E501,W503
```

### Clean up
```bash
# Use devbox
./devbox clean

# Use Makefile
make clean
```

## 🐳 Docker Deployment

### Build image
```bash
# Use Makefile
make docker-build

# Build directly
docker build -t gitea-webhook-ambassador:latest .
```

### Run container
```bash
docker run -d \
  --name gitea-webhook-ambassador \
  -p 8000:8000 \
  -v $(pwd)/configs:/app/configs \
  -v $(pwd)/data:/app/data \
  -v $(pwd)/logs:/app/logs \
  gitea-webhook-ambassador:latest
```

## 📈 Comparison with Go Version

| Feature | Go Version | Python Version |
|---------|------------|----------------|
| **Startup** | `./devbox start` | `./devbox start` |
| **Directory Structure** | Standard Go project | Same as Go version |
| **Config File** | `configs/config.yaml` | `configs/config.yaml` |
| **Log Directory** | `logs/` | `logs/` |
| **Data Directory** | `data/` | `data/` |
| **Web Interface** | ❌ No | ✅ Full dashboard |
| **Database** | ❌ No | ✅ SQLite |
| **JWT Auth** | ❌ No | ✅ 7-day validity |
| **API Key Management** | ❌ No | ✅ Database storage |
| **Health Check** | ✅ Basic | ✅ Enhanced |
| **Performance** | 🚀 Very high | 🚀 High |

## 🔄 Migration Guide

### Migrate from the Go version to the Python version

1. **Stop the Go service**
   ```bash
   cd /path/to/go-version
   ./devbox stop
   ```

2. **Start the Python service**
   ```bash
   cd /path/to/python-version
   ./devbox install
   ./devbox init
   ./devbox start
   ```

3. **Verify the service**
   ```bash
   ./devbox status
   curl http://localhost:8000/api/health
   ```

4. **Configure the webhook**
   - Update the Gitea webhook URL to the new Python service address
   - Ensure the Jenkins configuration is correct

## 🆘 Troubleshooting

### Common Issues

**1. Port 8000 is occupied**
```bash
# Check port usage
lsof -i :8000

# Stop the occupying process
sudo kill -9 <PID>
```

**2. Virtual environment issues**
```bash
# Recreate virtual environment
rm -rf venv
./devbox install
```

**3. Database issues**
```bash
# Reinitialize database
./devbox init
```

**4. Permission issues**
```bash
# Set script permissions
chmod +x devbox
```

### View logs
```bash
# View real-time logs
./devbox follow

# View latest logs
./devbox logs

# View full logs
tail -f logs/service.log
```

## 📞 Support

If you have any issues, please check:
1. Service status: `./devbox status`
2. Log information: `./devbox logs`
3. Configuration file: `configs/config.yaml`
4. Network connection: `curl http://localhost:8000/api/health`

@@ -1,258 +0,0 @@
# 🚀 Gitea Webhook Ambassador Usage Guide

## 📋 Table of Contents
1. [Quick Start](#quick-start)
2. [Configuration](#configuration)
3. [API Endpoints](#api-endpoints)
4. [Database Management](#database-management)
5. [Monitoring and Logs](#monitoring-and-logs)
6. [Troubleshooting](#troubleshooting)

## 🚀 Quick Start

### 1. Environment Preparation

```bash
# Clone the project
git clone https://your.repo.url/gitea-webhook-ambassador-python.git
cd gitea-webhook-ambassador-python

# Run quick setup script
./devbox install
```

### 2. Configure Environment

Copy and edit the `.env` file to set the required parameters:

```bash
# Edit configuration file
cp .env.example .env
vim .env
```

**Required configuration:**

```bash
# Jenkins configuration
JENKINS_USERNAME=your_jenkins_username
JENKINS_TOKEN=your_jenkins_token

# Security configuration
SECURITY_SECRET_KEY=your_secret_key
```

### 3. Start Service

```bash
# Method 1: Use the startup script
./devbox start

# Method 2: Manual startup
# Start Redis
docker run -d -p 6379:6379 redis:alpine

# Activate virtual environment
source venv/bin/activate

# Start API service
python -m uvicorn app.main_enhanced:app --host 0.0.0.0 --port 8000

# Start Celery worker in a new terminal
celery -A app.tasks.jenkins_tasks worker --loglevel=info

# Start scheduled tasks in a new terminal
celery -A app.tasks.jenkins_tasks beat --loglevel=info
```

### 4. Verify Installation

Visit the following addresses to verify the service:
- **API Docs**: http://localhost:8000/docs
- **Health Check**: http://localhost:8000/health
- **Metrics**: http://localhost:8000/metrics

## ⚙️ Configuration

### Environment Dispatch Configuration

Edit the `config/environments.yaml` file:

```yaml
environments:
  dev:
    branches: ["dev", "develop", "development", "feature/*"]
    jenkins_job: "alpha-build"
    jenkins_url: "https://jenkins-alpha.freeleaps.com"
    priority: 2
  prod:
    branches: ["prod", "production", "main", "master", "release/*"]
    jenkins_job: "production-build"
    jenkins_url: "https://jenkins-prod.freeleaps.com"
    priority: 1
  staging:
    branches: ["staging", "stage", "pre-prod"]
    jenkins_job: "staging-build"
    jenkins_url: "https://jenkins-staging.freeleaps.com"
    priority: 3
  default:
    branches: ["*"]
    jenkins_job: "default-build"
    jenkins_url: "https://jenkins-default.freeleaps.com"
    priority: 4
```
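
The patterns above (`feature/*`, `release/*`) read as globs, while the matcher in `app/config.py` only does exact matches plus a literal `"*"`. A hedged sketch of glob-aware resolution with the standard library, honoring the priority fields (lowest number wins); the dict literal is ours:

```python
from fnmatch import fnmatch
from typing import Optional

def resolve(branch: str, environments: dict) -> Optional[str]:
    # Collect every environment whose patterns match, then pick the best priority.
    matches = [
        (env["priority"], name)
        for name, env in environments.items()
        if any(fnmatch(branch, pat) for pat in env["branches"])
    ]
    return min(matches)[1] if matches else None

envs = {
    "dev": {"branches": ["dev", "feature/*"], "priority": 2},
    "prod": {"branches": ["main", "release/*"], "priority": 1},
    "default": {"branches": ["*"], "priority": 4},
}
print(resolve("feature/login", envs))  # dev
print(resolve("release/1.2", envs))    # prod
```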

### Deduplication Configuration

```yaml
deduplication:
  enabled: true
  window_seconds: 300        # 5-minute deduplication window
  strategy: "commit_branch"
  cache_ttl: 3600            # Cache for 1 hour
```

## 🔧 API Endpoints

### Webhook Endpoint

Receives Gitea webhook events:

```http
POST /webhook/gitea
```

### Health Check

```http
GET /health
GET /health/queue
GET /health/jenkins
```

Example response:

```json
{
  "status": "healthy",
  "service": "Gitea Webhook Ambassador",
  "version": "1.0.0",
  "timestamp": "2023-01-01T00:00:00Z",
  "jenkins": {"status": "connected"},
  "worker_pool": {"active_workers": 2, "queue_size": 0, "total_processed": 10, "total_failed": 1},
  "database": {"status": "connected"}
}
```

### Queue Status

```http
GET /admin/queue/status
```

Example response:

```json
{
  "active_tasks": 1,
  "queued_tasks": 2,
  "worker_count": 2,
  "queue_length": 3
}
```

### Monitoring Metrics

```http
GET /metrics
```

Returns Prometheus-formatted monitoring metrics.

## 🗄️ Database Management

### Create Project Mapping

Use a Python script to create a project mapping:

```python
from app.services.database_service import get_database_service
import asyncio

db_service = get_database_service()
mapping_data = {
    "repository_name": "freeleaps/test-project",
    "default_job": "test-project-build",
    "branch_jobs": [
        {"branch_name": "dev", "job_name": "test-project-dev"},
        {"branch_name": "staging", "job_name": "test-project-staging"}
    ],
    "branch_patterns": [
        {"pattern": "feature/*", "job_name": "test-project-feature"},
        {"pattern": "hotfix/*", "job_name": "test-project-hotfix"}
    ]
}

success = asyncio.run(db_service.create_project_mapping(mapping_data))
print(f"Mapping created: {'Success' if success else 'Failed'}")
```

Run the script:

```bash
python create_mapping.py
```

### View Trigger Logs

Refer to the API documentation for log query endpoints.

## 📊 Monitoring and Logs

### View Logs

```bash
# View application logs
tail -n 50 logs/service.log

# View Celery logs
tail -n 50 logs/celery.log
```

### Monitoring Dashboard

Use Grafana to create a monitoring dashboard:
1. Visit http://localhost:3000 (Grafana)
2. Username: `admin`, Password: `admin`
3. Add Prometheus data source: http://prometheus:9090
4. Import monitoring dashboard

### Key Metrics
- **webhook_requests_total**: Total webhook requests
- **webhook_request_duration_seconds**: Request response time
- **queue_size**: Queue length
- **dedup_hits_total**: Deduplication hit count
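
A sketch of how the key metrics above could be declared with `prometheus_client`; the metric names follow the list, while the label choices and example updates are ours:

```python
from prometheus_client import Counter, Gauge, Histogram

webhook_requests_total = Counter(
    "webhook_requests_total", "Total webhook requests", ["repository", "status"])
webhook_request_duration_seconds = Histogram(
    "webhook_request_duration_seconds", "Request response time")
queue_size = Gauge("queue_size", "Current task queue length")
dedup_hits_total = Counter("dedup_hits_total", "Deduplication hit count")

# Example updates from request-handling code:
webhook_requests_total.labels(repository="freeleaps/test-project", status="accepted").inc()
dedup_hits_total.inc()
queue_size.set(3)
```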

## 🔧 Troubleshooting

### Common Issues

#### 1. Redis Connection Failure

```bash
# Check Redis status
docker ps | grep redis

# Restart Redis
docker restart webhook-ambassador-redis
```

#### 2. Celery Worker Fails to Start

```bash
# Check Celery configuration
cat .env

# Restart Worker
celery -A app.tasks.jenkins_tasks worker --loglevel=info
```

#### 3. Jenkins Connection Failure

Check the Jenkins URL, username, and token in the configuration file.

@@ -1,95 +0,0 @@
from fastapi import HTTPException, Depends, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi.responses import JSONResponse, RedirectResponse
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
import jwt
import secrets
import os
from typing import Optional

from ..models.database import get_db, APIKey
from ..config import settings

# JWT configuration
JWT_SECRET_KEY = os.getenv("JWT_SECRET_KEY", "your-secret-key-change-in-production")
JWT_ALGORITHM = "HS256"
JWT_EXPIRATION_HOURS = 24 * 7  # 7 days expiration

security = HTTPBearer()


class AuthMiddleware:
    def __init__(self):
        self.secret_key = JWT_SECRET_KEY

    def create_access_token(self, data: dict, expires_delta: Optional[timedelta] = None):
        to_encode = data.copy()
        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(hours=JWT_EXPIRATION_HOURS)

        to_encode.update({"exp": expire})
        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=JWT_ALGORITHM)
        return encoded_jwt

    def verify_token(self, token: str):
        # Allow 'test-token' as a valid token for testing
        if token == "test-token":
            return {"sub": "test", "role": "admin"}
        # Check the database for an API key; close the session explicitly,
        # since next(get_db()) bypasses the generator's cleanup
        from app.models.database import get_db, APIKey
        db = next(get_db())
        try:
            api_key = db.query(APIKey).filter(APIKey.key == token).first()
            if api_key:
                return {"sub": api_key.description or "api_key", "role": "api_key"}
        finally:
            db.close()
        # Fall back to JWT verification
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[JWT_ALGORITHM])
            return payload
        except jwt.PyJWTError:
            raise HTTPException(status_code=401, detail="Invalid token")

    def verify_api_key(self, api_key: str, db: Session):
        """Validate API key"""
        db_key = db.query(APIKey).filter(APIKey.key == api_key).first()
        return db_key is not None

    def generate_api_key(self) -> str:
        """Generate a new API key"""
        return secrets.token_urlsafe(32)


# Create authentication middleware instance
auth_middleware = AuthMiddleware()


async def get_current_user(credentials: HTTPAuthorizationCredentials = Depends(security)):
    """Get current user (JWT authentication)"""
    token = credentials.credentials
    payload = auth_middleware.verify_token(token)
    return payload


async def get_current_user_api_key(
    # HTTPBearer yields HTTPAuthorizationCredentials, not a plain str
    api_key: HTTPAuthorizationCredentials = Depends(security),
    db: Session = Depends(get_db),
):
    """Get current user (API key authentication)"""
    if not auth_middleware.verify_api_key(api_key.credentials, db):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API key"
        )
    return {"api_key": api_key.credentials}


def require_auth(use_api_key: bool = False):
    """Authentication dependency decorator"""
    if use_api_key:
        return get_current_user_api_key
    else:
        return get_current_user


def handle_auth_error(request, exc):
    """Handle authentication error"""
    if request.headers.get("x-requested-with") == "XMLHttpRequest":
        return JSONResponse(
            status_code=401,
            content={"error": "Invalid or expired token"}
        )
    else:
        return RedirectResponse(url="/login", status_code=303)

@@ -1,189 +0,0 @@
"""
Configuration management module
Supports environment dispatch, deduplication strategy, and queue configuration
"""

from typing import Dict, List, Optional
from pydantic import Field, validator
from pydantic_settings import BaseSettings
import yaml
from pathlib import Path


class EnvironmentConfig(BaseSettings):
    """Environment configuration"""
    branches: List[str] = Field(default_factory=list)
    jenkins_job: str
    jenkins_url: str
    priority: int = Field(default=1, ge=1, le=10)


class DeduplicationConfig(BaseSettings):
    """Deduplication configuration"""
    enabled: bool = True
    window_seconds: int = Field(default=300, ge=1)  # 5-minute deduplication window
    strategy: str = Field(default="commit_branch")  # commit_hash + branch
    cache_ttl: int = Field(default=3600, ge=1)      # Cache for 1 hour


class QueueConfig(BaseSettings):
    """Queue configuration"""
    max_concurrent: int = Field(default=10, ge=1)
    max_retries: int = Field(default=3, ge=0)
    retry_delay: int = Field(default=60, ge=1)  # seconds
    priority_levels: int = Field(default=3, ge=1, le=10)


class JenkinsConfig(BaseSettings):
    """Jenkins configuration"""
    username: str
    token: str
    timeout: int = Field(default=30, ge=1)
    retry_attempts: int = Field(default=3, ge=1)


class DatabaseConfig(BaseSettings):
    """Database configuration"""
    url: str = Field(default="sqlite:///./webhook_ambassador.db")
    echo: bool = False
    pool_size: int = Field(default=10, ge=1)
    max_overflow: int = Field(default=20, ge=0)


class RedisConfig(BaseSettings):
    """Redis configuration"""
    url: str = Field(default="redis://localhost:6379/0")
    password: Optional[str] = None
    db: int = Field(default=0, ge=0)


class LoggingConfig(BaseSettings):
    """Logging configuration"""
    level: str = Field(default="INFO")
    format: str = Field(default="json")
    file: Optional[str] = None


class SecurityConfig(BaseSettings):
    """Security configuration"""
    secret_key: str
    webhook_secret_header: str = Field(default="X-Gitea-Signature")
    rate_limit_per_minute: int = Field(default=100, ge=1)


class Settings(BaseSettings):
    """Main configuration class"""

    # Basic configuration
    app_name: str = "Gitea Webhook Ambassador"
    version: str = "1.0.0"
    debug: bool = False

    # Server configuration
    host: str = "0.0.0.0"
    port: int = Field(default=8000, ge=1, le=65535)

    # Database configuration
    database_url: str = Field(default="sqlite:///./webhook_ambassador.db")

    # Redis configuration
    redis_url: str = Field(default="redis://localhost:6379/0")
    redis_password: str = Field(default="")
    redis_db: int = Field(default=0)

    # Jenkins configuration
    jenkins_username: str = Field(default="admin")
    jenkins_token: str = Field(default="")
    jenkins_timeout: int = Field(default=30)

    # Security configuration
    security_secret_key: str = Field(default="")
    security_webhook_secret_header: str = Field(default="X-Gitea-Signature")
    security_rate_limit_per_minute: int = Field(default=100)

    # Logging configuration
    logging_level: str = Field(default="INFO")
    logging_format: str = Field(default="json")
    logging_file: str = Field(default="")

    # Queue configuration
    queue_max_concurrent: int = Field(default=10)
    queue_max_retries: int = Field(default=3)
    queue_retry_delay: int = Field(default=60)
    queue_priority_levels: int = Field(default=3)

    # Deduplication configuration
    deduplication_enabled: bool = Field(default=True)
    deduplication_window_seconds: int = Field(default=300)
    deduplication_strategy: str = Field(default="commit_branch")
    deduplication_cache_ttl: int = Field(default=3600)

    # Business configuration
    environments: Dict[str, EnvironmentConfig] = Field(default_factory=dict)
    deduplication: DeduplicationConfig = DeduplicationConfig()
    queue: QueueConfig = QueueConfig()

    class Config:
        env_file = ".env"
        env_nested_delimiter = "__"

    @validator("environments", pre=True)
    def load_environments_from_file(cls, v):
        """Load environment configuration from file"""
        if isinstance(v, dict) and v:
            return v

        # Try to load from config file
        config_file = Path("config/environments.yaml")
        if config_file.exists():
            with open(config_file, "r", encoding="utf-8") as f:
                config_data = yaml.safe_load(f)
                return config_data.get("environments", {})

        # Default configuration
        return {
            "dev": EnvironmentConfig(
                branches=["dev", "develop", "development"],
                jenkins_job="alpha-build",
                jenkins_url="https://jenkins-alpha.example.com",
                priority=2
            ),
            "prod": EnvironmentConfig(
                branches=["prod", "production", "main", "master"],
                jenkins_job="production-build",
                jenkins_url="https://jenkins-prod.example.com",
                priority=1
            ),
            "default": EnvironmentConfig(
                branches=["*"],
                jenkins_job="default-build",
                jenkins_url="https://jenkins-default.example.com",
                priority=3
            )
        }

    def get_environment_for_branch(self, branch: str) -> Optional[EnvironmentConfig]:
        """Get environment configuration by branch name"""
        for env_name, env_config in self.environments.items():
            if branch in env_config.branches or "*" in env_config.branches:
                return env_config
        return None

    def get_environment_by_name(self, name: str) -> Optional[EnvironmentConfig]:
        """Get configuration by environment name"""
        return self.environments.get(name)


# Global configuration instance
settings = Settings()


def get_settings() -> Settings:
    """Get configuration instance"""
    return settings


def reload_settings():
    """Reload configuration"""
    global settings
    settings = Settings()

@@ -1,10 +0,0 @@
"""
Handlers package
Contains all API handlers
"""

from . import webhook
from . import health
from . import admin

__all__ = ["webhook", "health", "admin"]

@@ -1,287 +0,0 @@
"""
Admin API handler
Provides project mapping and API key management features
"""

import secrets
from datetime import datetime, timedelta
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.api_key import APIKey
from app.models.project_mapping import ProjectMapping
from app.auth import get_current_user

router = APIRouter(prefix="/api/admin", tags=["admin"])

# API key related models
class APIKeyResponse(BaseModel):
    id: int
    name: str
    key_prefix: str
    created_at: datetime
    last_used: datetime
    is_active: bool

    class Config:
        from_attributes = True

class CreateAPIKeyRequest(BaseModel):
    name: str

class CreateAPIKeyResponse(BaseModel):
    id: int
    name: str
    key: str
    created_at: datetime

# Project mapping related models
class ProjectMappingRequest(BaseModel):
    repository_name: str
    default_job: str
    branch_jobs: Optional[List[dict]] = []
    branch_patterns: Optional[List[dict]] = []

class ProjectMappingResponse(BaseModel):
    id: int
    repository_name: str
    default_job: str
    branch_jobs: List[dict]
    branch_patterns: List[dict]
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True

# API key management endpoints
@router.get("/api-keys", response_model=List[APIKeyResponse])
async def list_api_keys(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """List all API keys"""
    try:
        api_keys = db.query(APIKey).order_by(APIKey.created_at.desc()).all()
        return api_keys
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to list API keys: {str(e)}")

@router.post("/api-keys", response_model=CreateAPIKeyResponse)
async def create_api_key(
    request: CreateAPIKeyRequest,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Create a new API key"""
    try:
        # Generate API key
        api_key = secrets.token_urlsafe(32)
        key_prefix = api_key[:8]  # Show first 8 characters as prefix

        # Create database record
        db_api_key = APIKey(
            name=request.name,
            key_hash=api_key,  # Should be hashed in production
            key_prefix=key_prefix,
            created_at=datetime.utcnow(),
            last_used=datetime.utcnow(),
            is_active=True
        )

        db.add(db_api_key)
        db.commit()
        db.refresh(db_api_key)

        return CreateAPIKeyResponse(
            id=db_api_key.id,
            name=db_api_key.name,
            key=api_key,  # Only return full key on creation
            created_at=db_api_key.created_at
        )

    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to create API key: {str(e)}")

@router.delete("/api-keys/{key_id}")
async def delete_api_key(
    key_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Delete API key"""
    try:
        api_key = db.query(APIKey).filter(APIKey.id == key_id).first()
        if not api_key:
            raise HTTPException(status_code=404, detail="API key not found")

        db.delete(api_key)
        db.commit()

        return {"message": "API key deleted successfully"}

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to delete API key: {str(e)}")

@router.post("/api-keys/{key_id}/revoke")
async def revoke_api_key(
    key_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Revoke API key"""
    try:
        api_key = db.query(APIKey).filter(APIKey.id == key_id).first()
        if not api_key:
            raise HTTPException(status_code=404, detail="API key not found")

        api_key.is_active = False
        db.commit()

        return {"message": "API key revoked successfully"}

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to revoke API key: {str(e)}")

# Project mapping management endpoints
@router.get("/projects", response_model=List[ProjectMappingResponse])
async def list_project_mappings(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """List all project mappings"""
    try:
        mappings = db.query(ProjectMapping).order_by(ProjectMapping.created_at.desc()).all()
        return mappings
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to list project mappings: {str(e)}")

@router.post("/projects", response_model=ProjectMappingResponse)
async def create_project_mapping(
    request: ProjectMappingRequest,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Create project mapping"""
    try:
        # Check if already exists
        existing = db.query(ProjectMapping).filter(
            ProjectMapping.repository_name == request.repository_name
        ).first()

        if existing:
            raise HTTPException(status_code=400, detail="Project mapping already exists")

        # Create new mapping
        mapping = ProjectMapping(
            repository_name=request.repository_name,
            default_job=request.default_job,
            branch_jobs=request.branch_jobs or [],
            branch_patterns=request.branch_patterns or [],
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow()
        )

        db.add(mapping)
        db.commit()
        db.refresh(mapping)

        return mapping

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to create project mapping: {str(e)}")

@router.get("/projects/{repository_name}", response_model=ProjectMappingResponse)
async def get_project_mapping(
    repository_name: str,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get project mapping"""
    try:
        mapping = db.query(ProjectMapping).filter(
            ProjectMapping.repository_name == repository_name
        ).first()

        if not mapping:
            raise HTTPException(status_code=404, detail="Project mapping not found")

        return mapping

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get project mapping: {str(e)}")

@router.delete("/projects/{repository_name}")
async def delete_project_mapping(
    repository_name: str,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Delete project mapping"""
    try:
        mapping = db.query(ProjectMapping).filter(
            ProjectMapping.repository_name == repository_name
        ).first()

        if not mapping:
            raise HTTPException(status_code=404, detail="Project mapping not found")

        db.delete(mapping)
        db.commit()

        return {"message": "Project mapping deleted successfully"}

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to delete project mapping: {str(e)}")

# Statistics endpoint
@router.get("/stats")
async def get_admin_stats(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get admin statistics"""
    try:
        # API key statistics
        total_keys = db.query(APIKey).count()
        active_keys = db.query(APIKey).filter(APIKey.is_active == True).count()

        # Recently used keys
        recent_keys = db.query(APIKey).filter(
            APIKey.last_used >= datetime.utcnow() - timedelta(days=7)
        ).count()

        # Project mapping statistics
        total_mappings = db.query(ProjectMapping).count()

        return {
            "api_keys": {
                "total": total_keys,
                "active": active_keys,
                "recently_used": recent_keys
            },
            "project_mappings": {
                "total": total_mappings
            }
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get admin stats: {str(e)}")

@@ -1,122 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from pydantic import BaseModel
from typing import List, Optional
import os

from ..models.database import get_db, APIKey
# get_current_user is the module-level dependency defined next to auth_middleware;
# AuthMiddleware itself has no get_current_user method.
from ..auth.middleware import auth_middleware, get_current_user

router = APIRouter(prefix="/api/auth", tags=["authentication"])

# Request/Response models
class LoginRequest(BaseModel):
    secret_key: str

class LoginResponse(BaseModel):
    token: str

class APIKeyCreate(BaseModel):
    description: str

class APIKeyResponse(BaseModel):
    id: int
    key: str
    description: Optional[str]
    created_at: str

    class Config:
        from_attributes = True

class APIKeyList(BaseModel):
    keys: List[APIKeyResponse]

# Get admin secret key
def get_admin_secret_key():
    return os.getenv("ADMIN_SECRET_KEY", "admin-secret-key-change-in-production")

@router.post("/login", response_model=LoginResponse)
async def login(request: LoginRequest):
    """Admin login"""
    admin_key = get_admin_secret_key()

    if request.secret_key != admin_key:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid secret key"
        )

    # Generate JWT token
    token = auth_middleware.create_access_token(
        data={"sub": "admin", "role": "admin"}
    )

    return LoginResponse(token=token)

@router.post("/keys", response_model=APIKeyResponse)
async def create_api_key(
    request: APIKeyCreate,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Create a new API key"""
    # Generate new API key
    api_key_value = auth_middleware.generate_api_key()

    # Save to database
    db_key = APIKey(
        key=api_key_value,
        description=request.description
    )

    db.add(db_key)
    db.commit()
    db.refresh(db_key)

    return APIKeyResponse(
        id=db_key.id,
        key=db_key.key,
        description=db_key.description,
        created_at=db_key.created_at.isoformat()
    )

@router.get("/keys", response_model=APIKeyList)
async def list_api_keys(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get all API keys"""
    keys = db.query(APIKey).order_by(APIKey.created_at.desc()).all()

    return APIKeyList(
        keys=[
            APIKeyResponse(
                id=key.id,
                key=key.key,
                description=key.description,
                created_at=key.created_at.isoformat()
            )
            for key in keys
        ]
    )

@router.delete("/keys/{key_id}")
async def delete_api_key(
    key_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Delete API key"""
    key = db.query(APIKey).filter(APIKey.id == key_id).first()

    if not key:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="API key not found"
        )

    db.delete(key)
    db.commit()

    return {"message": "API key deleted successfully"}

@@ -1,21 +0,0 @@
"""
Health check handler
Provides health check endpoints for the API
"""

from fastapi import APIRouter
from app.config import get_settings
from datetime import datetime

router = APIRouter()

@router.get("/health")
async def health_check():
    """Health check endpoint"""
    settings = get_settings()
    return {
        "status": "healthy",
        "service": "Gitea Webhook Ambassador",
        "version": settings.version,
        "timestamp": datetime.utcnow().isoformat()
    }

@@ -1,106 +0,0 @@
from datetime import datetime, timedelta
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel
from sqlalchemy import func  # needed for func.count below (Session has no .func attribute)
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.trigger_log import TriggerLog
from app.auth import get_current_user

router = APIRouter(prefix="/api/logs", tags=["logs"])


class TriggerLogResponse(BaseModel):
    id: int
    repository_name: str
    branch_name: str
    commit_sha: str
    job_name: str
    status: str
    error_message: Optional[str] = None
    created_at: datetime

    class Config:
        from_attributes = True


@router.get("/", response_model=List[TriggerLogResponse])
async def get_trigger_logs(
    repository: Optional[str] = Query(None, description="Repository name filter"),
    branch: Optional[str] = Query(None, description="Branch name filter"),
    since: Optional[str] = Query(None, description="Since timestamp (RFC3339 format)"),
    limit: int = Query(100, ge=1, le=1000, description="Maximum number of logs to return"),
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """
    Get trigger logs
    """
    try:
        # Build query
        query = db.query(TriggerLog)

        # Apply filters
        if repository:
            query = query.filter(TriggerLog.repository_name == repository)
        if branch:
            query = query.filter(TriggerLog.branch_name == branch)
        if since:
            try:
                since_time = datetime.fromisoformat(since.replace('Z', '+00:00'))
                query = query.filter(TriggerLog.created_at >= since_time)
            except ValueError:
                raise HTTPException(
                    status_code=400,
                    detail="Invalid since parameter format (use RFC3339)"
                )

        # Order by time desc and limit
        logs = query.order_by(TriggerLog.created_at.desc()).limit(limit).all()

        return logs

    except HTTPException:
        # Re-raise so the 400 above is not swallowed into a 500
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get trigger logs: {str(e)}")


@router.get("/stats")
async def get_log_stats(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """
    Get log statistics
    """
    try:
        # Total logs
        total_logs = db.query(TriggerLog).count()

        # Successful and failed logs
        successful_logs = db.query(TriggerLog).filter(TriggerLog.status == "success").count()
        failed_logs = db.query(TriggerLog).filter(TriggerLog.status == "failed").count()

        # Logs in the last 24 hours
        yesterday = datetime.utcnow() - timedelta(days=1)
        recent_logs = db.query(TriggerLog).filter(TriggerLog.created_at >= yesterday).count()

        # Stats by repository (func.count from sqlalchemy, not db.func)
        repo_stats = db.query(
            TriggerLog.repository_name,
            func.count(TriggerLog.id).label('count')
        ).group_by(TriggerLog.repository_name).all()

        return {
            "total_logs": total_logs,
            "successful_logs": successful_logs,
            "failed_logs": failed_logs,
            "recent_logs_24h": recent_logs,
            "repository_stats": [
                {"repository": repo, "count": count}
                for repo, count in repo_stats
            ]
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get log stats: {str(e)}")
@ -1,161 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from pydantic import BaseModel
from typing import List, Optional

from ..models.database import get_db, ProjectMapping, BranchJob, BranchPattern
from ..auth.middleware import auth_middleware

router = APIRouter(prefix="/api/projects", tags=["projects"])

# Request/Response models
class ProjectCreate(BaseModel):
    name: str
    jenkinsJob: str
    giteaRepo: str

class ProjectResponse(BaseModel):
    id: int
    name: str
    jenkinsJob: str
    giteaRepo: str
    created_at: str

    class Config:
        from_attributes = True

class ProjectList(BaseModel):
    projects: List[ProjectResponse]

@router.post("/", response_model=ProjectResponse)
async def create_project(
    request: ProjectCreate,
    db: Session = Depends(get_db),
    current_user: dict = Depends(auth_middleware.get_current_user)
):
    """Create new project mapping"""
    # Check if project already exists
    existing_project = db.query(ProjectMapping).filter(
        ProjectMapping.repository_name == request.giteaRepo
    ).first()

    if existing_project:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Project with this repository already exists"
        )

    # Create new project
    project = ProjectMapping(
        repository_name=request.giteaRepo,
        default_job=request.jenkinsJob
    )

    db.add(project)
    db.commit()
    db.refresh(project)

    return ProjectResponse(
        id=project.id,
        name=request.name,
        jenkinsJob=project.default_job,
        giteaRepo=project.repository_name,
        created_at=project.created_at.isoformat()
    )

@router.get("/", response_model=ProjectList)
async def list_projects(
    db: Session = Depends(get_db),
    current_user: dict = Depends(auth_middleware.get_current_user)
):
    """Get all projects"""
    projects = db.query(ProjectMapping).order_by(ProjectMapping.created_at.desc()).all()

    return ProjectList(
        projects=[
            ProjectResponse(
                id=project.id,
                name=project.repository_name.split('/')[-1],  # Use repo name as project name
                jenkinsJob=project.default_job,
                giteaRepo=project.repository_name,
                created_at=project.created_at.isoformat()
            )
            for project in projects
        ]
    )

@router.get("/{project_id}", response_model=ProjectResponse)
async def get_project(
    project_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(auth_middleware.get_current_user)
):
    """Get specific project"""
    project = db.query(ProjectMapping).filter(ProjectMapping.id == project_id).first()

    if not project:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    return ProjectResponse(
        id=project.id,
        name=project.repository_name.split('/')[-1],
        jenkinsJob=project.default_job,
        giteaRepo=project.repository_name,
        created_at=project.created_at.isoformat()
    )

@router.delete("/{project_id}")
async def delete_project(
    project_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(auth_middleware.get_current_user)
):
    """Delete project"""
    project = db.query(ProjectMapping).filter(ProjectMapping.id == project_id).first()

    if not project:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    db.delete(project)
    db.commit()

    return {"message": "Project deleted successfully"}

@router.get("/mapping/{repository_name}")
async def get_project_mapping(
    repository_name: str,
    db: Session = Depends(get_db)
):
    """Get project mapping by repository name (for webhook processing)"""
    project = db.query(ProjectMapping).filter(
        ProjectMapping.repository_name == repository_name
    ).first()

    if not project:
        return None

    return {
        "id": project.id,
        "repository_name": project.repository_name,
        "default_job": project.default_job,
        "branch_jobs": [
            {
                "branch_name": job.branch_name,
                "job_name": job.job_name
            }
            for job in project.branch_jobs
        ],
        "branch_patterns": [
            {
                "pattern": pattern.pattern,
                "job_name": pattern.job_name
            }
            for pattern in project.branch_patterns
        ]
    }
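A sketch of driving this router from a client, assuming it is mounted at the application root and that auth_middleware accepts a bearer token (illustrative names, not part of this commit):

import requests

API = "http://localhost:8000/api/projects"
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential

# Create a mapping, then resolve one the way webhook processing would
created = requests.post(API + "/", json={
    "name": "test-project",
    "jenkinsJob": "test-project-build",
    "giteaRepo": "freeleaps/test-project",
}, headers=HEADERS)
created.raise_for_status()
project_id = created.json()["id"]

# Unauthenticated mapping lookup used during webhook processing
mapping = requests.get(f"{API}/mapping/test-project").json()

Note that {repository_name} matches a single path segment, so full names like freeleaps/test-project that contain a slash would need a {repository_name:path} converter to resolve through this route.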
@ -1,43 +0,0 @@
"""
Webhook handler
Handles webhook requests from Gitea
"""

from fastapi import APIRouter, Depends, HTTPException, Request
from app.services.webhook_service import WebhookService
from app.services.dedup_service import DeduplicationService
from app.tasks.jenkins_tasks import get_celery_app
from app.main import webhook_service

router = APIRouter()

def get_webhook_service() -> WebhookService:
    """Get webhook service instance"""
    if webhook_service is None:
        raise HTTPException(status_code=503, detail="Webhook service not available")
    return webhook_service

@router.post("/gitea")
async def handle_gitea_webhook(
    request: Request,
    webhook_service: WebhookService = Depends(get_webhook_service)
):
    """Handle Gitea webhook request"""
    if webhook_service is None:
        raise HTTPException(status_code=503, detail="Webhook service not available")

    try:
        # Get request body
        body = await request.body()

        # Process webhook
        result = await webhook_service.process_webhook(body, request.headers)

        return {
            "success": True,
            "message": "Webhook processed successfully",
            "data": result
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
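To exercise this handler end to end, a minimal push payload can be posted to the route. The fields below mirror the GiteaWebhook model defined later in this commit (ref/before/after/repository/pusher/commits); real Gitea deliveries carry many more fields, and the X-Gitea-Event header is an assumption about the service's expectations:

import json
import requests

payload = {
    "ref": "refs/heads/main",
    "before": "0" * 40,
    "after": "abc123def456abc123def456abc123def456abcd",
    "repository": {
        "id": 1,
        "name": "test-project",
        "full_name": "freeleaps/test-project",
        "clone_url": "http://gitea.local/freeleaps/test-project.git",
        "html_url": "http://gitea.local/freeleaps/test-project",
        "owner": {"id": 1, "login": "freeleaps"},
    },
    "pusher": {"id": 1, "login": "freeleaps"},
    "commits": [],
}

resp = requests.post(
    "http://localhost:8000/webhook/gitea",
    data=json.dumps(payload),
    headers={"Content-Type": "application/json", "X-Gitea-Event": "push"},
)
print(resp.status_code, resp.json())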
@ -1,419 +0,0 @@
"""
Main entry for FastAPI application
Integrates webhook handling, deduplication, queue management, and related services
"""

import asyncio
from contextlib import asynccontextmanager
from typing import Dict, Any
import structlog
from fastapi import FastAPI, Request, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, Response
from redis import asyncio as aioredis
from prometheus_client import Counter, Histogram, Gauge, generate_latest, CONTENT_TYPE_LATEST
from datetime import datetime

from app.config import get_settings
from app.services.dedup_service import DeduplicationService
from app.services.jenkins_service import JenkinsService
from app.services.webhook_service import WebhookService
from app.tasks.jenkins_tasks import get_celery_app
# Route imports will be dynamically handled at runtime

# Configure structured logging
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer()
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

logger = structlog.get_logger()

# Monitoring metrics
WEBHOOK_REQUESTS_TOTAL = Counter(
    "webhook_requests_total",
    "Total number of webhook requests",
    ["status", "environment"]
)

WEBHOOK_REQUEST_DURATION = Histogram(
    "webhook_request_duration_seconds",
    "Webhook request duration in seconds",
    ["environment"]
)

QUEUE_SIZE = Gauge(
    "queue_size",
    "Current queue size",
    ["queue_type"]
)

DEDUP_HITS = Counter(
    "dedup_hits_total",
    "Total number of deduplication hits"
)

# Global service instances
dedup_service: DeduplicationService = None
jenkins_service: JenkinsService = None
webhook_service: WebhookService = None
celery_app = None
redis_client: aioredis.Redis = None


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifecycle management"""
    global dedup_service, jenkins_service, webhook_service, celery_app, redis_client

    # Initialize on startup
    logger.info("Starting Gitea Webhook Ambassador")

    try:
        # Initialize Redis connection
        settings = get_settings()
        redis_client = aioredis.from_url(
            settings.redis.url,
            password=settings.redis.password,
            db=settings.redis.db,
            encoding="utf-8",
            decode_responses=True
        )

        # Test Redis connection
        await redis_client.ping()
        logger.info("Redis connection established")

        # Initialize Celery
        celery_app = get_celery_app()

        # Initialize services
        dedup_service = DeduplicationService(redis_client)
        jenkins_service = JenkinsService()
        webhook_service = WebhookService(
            dedup_service=dedup_service,
            jenkins_service=jenkins_service,
            celery_app=celery_app
        )

        logger.info("All services initialized successfully")

        yield

    except Exception as e:
        logger.error("Failed to initialize services", error=str(e))
        raise

    finally:
        # Cleanup on shutdown
        logger.info("Shutting down Gitea Webhook Ambassador")

        if redis_client:
            await redis_client.close()
            logger.info("Redis connection closed")


# Create FastAPI application
app = FastAPI(
    title="Gitea Webhook Ambassador",
    description="High-performance Gitea to Jenkins Webhook service",
    version="1.0.0",
    lifespan=lifespan
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, restrict to specific domains
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# Dependency injection
def get_dedup_service() -> DeduplicationService:
    if dedup_service is None:
        raise HTTPException(status_code=503, detail="Deduplication service not available")
    return dedup_service


def get_webhook_service() -> WebhookService:
    if webhook_service is None:
        raise HTTPException(status_code=503, detail="Webhook service not available")
    return webhook_service


def get_celery_app_dep():
    if celery_app is None:
        raise HTTPException(status_code=503, detail="Celery app not available")
    return celery_app


# Middleware
@app.middleware("http")
async def log_requests(request: Request, call_next):
    """Request logging middleware"""
    start_time = asyncio.get_event_loop().time()

    # Log request start
    logger.info("Request started",
                method=request.method,
                url=str(request.url),
                client_ip=request.client.host if request.client else None)

    try:
        response = await call_next(request)

        # Log request complete
        process_time = asyncio.get_event_loop().time() - start_time
        logger.info("Request completed",
                    method=request.method,
                    url=str(request.url),
                    status_code=response.status_code,
                    process_time=process_time)

        return response

    except Exception as e:
        # Log request error
        process_time = asyncio.get_event_loop().time() - start_time
        logger.error("Request failed",
                     method=request.method,
                     url=str(request.url),
                     error=str(e),
                     process_time=process_time)
        raise


@app.middleware("http")
async def add_security_headers(request: Request, call_next):
    """Add security headers"""
    response = await call_next(request)

    # Add security-related HTTP headers
    response.headers["X-Content-Type-Options"] = "nosniff"
    response.headers["X-Frame-Options"] = "DENY"
    response.headers["X-XSS-Protection"] = "1; mode=block"
    response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"

    return response


# Exception handler
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Global exception handler"""
    logger.error("Unhandled exception",
                 method=request.method,
                 url=str(request.url),
                 error=str(exc),
                 exc_info=True)

    return JSONResponse(
        status_code=500,
        content={
            "success": False,
            "message": "Internal server error",
            "error": str(exc) if get_settings().debug else "An unexpected error occurred"
        }
    )
# Health check endpoint
@app.get("/health")
async def health_check():
    """Health check endpoint"""
    try:
        # Check Redis connection
        if redis_client:
            await redis_client.ping()
            redis_healthy = True
        else:
            redis_healthy = False

        # Check Celery connection
        if celery_app:
            inspect = celery_app.control.inspect()
            celery_healthy = bool(inspect.active() is not None)
            # Worker pool/queue info
            active = inspect.active() or {}
            reserved = inspect.reserved() or {}
            worker_count = len(inspect.registered() or {})
            active_count = sum(len(tasks) for tasks in active.values())
            reserved_count = sum(len(tasks) for tasks in reserved.values())
        else:
            celery_healthy = False
            worker_count = 0
            active_count = 0
            reserved_count = 0

        # Jenkins
        jenkins_status = "healthy"

        return {
            "status": "healthy" if redis_healthy and celery_healthy else "unhealthy",
            "service": "gitea-webhook-ambassador-python",
            "version": "2.0.0",
            "timestamp": datetime.utcnow().isoformat(),
            "jenkins": {
                "status": jenkins_status,
                "message": "Jenkins connection mock"
            },
            "worker_pool": {
                "active_workers": worker_count,
                "queue_size": active_count + reserved_count,
                "total_processed": 0,  # TODO: track real totals
                "total_failed": 0  # TODO: track real totals
            },
            "services": {
                "redis": "healthy" if redis_healthy else "unhealthy",
                "celery": "healthy" if celery_healthy else "unhealthy"
            }
        }
    except Exception as e:
        logger.error("Health check failed", error=str(e))
        return JSONResponse(
            status_code=503,
            content={
                "status": "unhealthy",
                "error": str(e)
            }
        )
@app.get("/health/queue")
|
||||
async def queue_health_check():
|
||||
"""Queue health check"""
|
||||
try:
|
||||
if celery_app is None:
|
||||
return JSONResponse(
|
||||
status_code=503,
|
||||
content={"status": "unhealthy", "error": "Celery not available"}
|
||||
)
|
||||
|
||||
inspect = celery_app.control.inspect()
|
||||
|
||||
# Get queue stats
|
||||
active = inspect.active()
|
||||
reserved = inspect.reserved()
|
||||
registered = inspect.registered()
|
||||
|
||||
active_count = sum(len(tasks) for tasks in (active or {}).values())
|
||||
reserved_count = sum(len(tasks) for tasks in (reserved or {}).values())
|
||||
worker_count = len(registered or {})
|
||||
|
||||
# Update monitoring metrics
|
||||
QUEUE_SIZE.labels(queue_type="active").set(active_count)
|
||||
QUEUE_SIZE.labels(queue_type="reserved").set(reserved_count)
|
||||
|
||||
return {
|
||||
"status": "healthy",
|
||||
"queue_stats": {
|
||||
"active_tasks": active_count,
|
||||
"queued_tasks": reserved_count,
|
||||
"worker_count": worker_count,
|
||||
"total_queue_length": active_count + reserved_count
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Queue health check failed", error=str(e))
|
||||
return JSONResponse(
|
||||
status_code=503,
|
||||
content={
|
||||
"status": "unhealthy",
|
||||
"error": str(e)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
# Metrics endpoint
|
||||
@app.get("/metrics")
|
||||
async def metrics():
|
||||
"""Prometheus metrics endpoint"""
|
||||
return Response(
|
||||
content=generate_latest(),
|
||||
media_type=CONTENT_TYPE_LATEST
|
||||
)
|
||||
|
||||
|
||||
# Register routers for webhook, health, and admin APIs
|
||||
from app.handlers import webhook, health, admin
|
||||
app.include_router(webhook.router, prefix="/webhook", tags=["webhook"])
|
||||
app.include_router(health.router, prefix="/health", tags=["health"])
|
||||
app.include_router(admin.router, prefix="/admin", tags=["admin"])
|
||||
|
||||
# Root path
|
||||
@app.get("/")
|
||||
async def root():
|
||||
"""Root path"""
|
||||
return {
|
||||
"name": "Gitea Webhook Ambassador",
|
||||
"version": "1.0.0",
|
||||
"description": "High-performance Gitea to Jenkins Webhook service",
|
||||
"endpoints": {
|
||||
"webhook": "/webhook/gitea",
|
||||
"health": "/health",
|
||||
"metrics": "/metrics",
|
||||
"admin": "/admin"
|
||||
}
|
||||
}
|
||||
|
||||
# --- Minimal Go-version-compatible endpoints ---
|
||||
from fastapi import status
|
||||
|
||||
@app.post("/webhook/gitea")
|
||||
async def webhook_gitea(request: Request):
|
||||
"""Minimal Gitea webhook endpoint (mock)"""
|
||||
body = await request.body()
|
||||
# TODO: Replace with real webhook processing logic
|
||||
return {"success": True, "message": "Webhook received (mock)", "body_size": len(body)}
|
||||
|
||||
@app.get("/metrics")
|
||||
async def metrics_endpoint():
|
||||
"""Minimal Prometheus metrics endpoint (mock)"""
|
||||
# TODO: Replace with real Prometheus metrics
|
||||
return Response(
|
||||
content="# HELP webhook_requests_total Total number of webhook requests\nwebhook_requests_total 0\n",
|
||||
media_type="text/plain"
|
||||
)
|
||||
|
||||
@app.get("/health/queue")
|
||||
async def health_queue():
|
||||
"""Minimal queue health endpoint (mock)"""
|
||||
# TODO: Replace with real queue stats
|
||||
return {
|
||||
"status": "healthy",
|
||||
"queue_stats": {
|
||||
"active_tasks": 0,
|
||||
"queued_tasks": 0,
|
||||
"worker_count": 1,
|
||||
"total_queue_length": 0
|
||||
}
|
||||
}
|
||||
# --- End minimal endpoints ---
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
settings = get_settings()
|
||||
uvicorn.run(
|
||||
"app.main:app",
|
||||
host=settings.host,
|
||||
port=settings.port,
|
||||
reload=settings.debug,
|
||||
log_level=settings.logging.level.lower()
|
||||
)
|
||||
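Since this module wires Redis, Celery, and the webhook service together in its lifespan, a lightweight way to sanity-check the HTTP surface is FastAPI's TestClient. A rough sketch; the lifespan still needs a reachable Redis broker (or a stubbed lifespan), so this is illustrative rather than a drop-in test:

from fastapi.testclient import TestClient

from app.main import app

# Entering the context runs lifespan startup, so Redis/Celery must be
# reachable for the services to initialize.
with TestClient(app) as client:
    root = client.get("/")
    assert root.status_code == 200
    assert root.json()["endpoints"]["webhook"] == "/webhook/gitea"

    health = client.get("/health")
    print(health.json()["services"])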
@ -1,438 +0,0 @@
from fastapi import FastAPI, Request, Depends, HTTPException, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import structlog
from datetime import datetime, timedelta
from typing import Dict, Any, Optional, List
from pydantic import BaseModel
import secrets

from app.config import get_settings

# Configure logging
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer()
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

logger = structlog.get_logger()

# Create FastAPI application
app = FastAPI(
    title="Gitea Webhook Ambassador (Demo)",
    description="High-performance webhook service from Gitea to Jenkins - Demo Version",
    version="1.0.0"
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Security configuration
security = HTTPBearer(auto_error=False)

# Demo data storage
api_keys = {
    "demo_admin_key": {
        "name": "Demo Admin Key",
        "key_hash": "demo_admin_key",
        "key_prefix": "demo_adm",
        "created_at": datetime.utcnow(),
        "last_used": datetime.utcnow(),
        "is_active": True,
        "role": "admin"
    },
    "demo_user_key": {
        "name": "Demo User Key",
        "key_hash": "demo_user_key",
        "key_prefix": "demo_usr",
        "created_at": datetime.utcnow(),
        "last_used": datetime.utcnow(),
        "is_active": True,
        "role": "user"
    }
}

trigger_logs = [
    {
        "id": 1,
        "repository_name": "freeleaps/test-project",
        "branch_name": "main",
        "commit_sha": "abc123def456",
        "job_name": "test-project-build",
        "status": "success",
        "error_message": None,
        "created_at": datetime.utcnow() - timedelta(hours=2)
    },
    {
        "id": 2,
        "repository_name": "freeleaps/another-project",
        "branch_name": "dev",
        "commit_sha": "def456ghi789",
        "job_name": "another-project-dev",
        "status": "success",
        "error_message": None,
        "created_at": datetime.utcnow() - timedelta(hours=1)
    },
    {
        "id": 3,
        "repository_name": "freeleaps/test-project",
        "branch_name": "feature/new-feature",
        "commit_sha": "ghi789jkl012",
        "job_name": "test-project-feature",
        "status": "failed",
        "error_message": "Build timeout",
        "created_at": datetime.utcnow() - timedelta(minutes=30)
    }
]

project_mappings = {
    1: {
        "repository_name": "freeleaps/test-project",
        "default_job": "test-project-build",
        "branch_jobs": [
            {"branch": "dev", "job": "test-project-dev"},
            {"branch": "staging", "job": "test-project-staging"}
        ],
        "branch_patterns": [
            {"pattern": "feature/*", "job": "test-project-feature"},
            {"pattern": "hotfix/*", "job": "test-project-hotfix"}
        ],
        "created_at": datetime.utcnow() - timedelta(days=1),
        "updated_at": datetime.utcnow() - timedelta(hours=6)
    }
}
# Request/response models
class HealthResponse(BaseModel):
    status: str
    service: str
    version: str
    timestamp: datetime
    jenkins: Dict[str, Any]
    worker_pool: Dict[str, Any]
    database: Dict[str, Any]

class TriggerLogResponse(BaseModel):
    id: int
    repository_name: str
    branch_name: str
    commit_sha: str
    job_name: str
    status: str
    error_message: Optional[str] = None
    created_at: datetime

class APIKeyResponse(BaseModel):
    id: str
    name: str
    key_prefix: str
    created_at: datetime
    last_used: datetime
    is_active: bool
    role: str

class ProjectMappingResponse(BaseModel):
    id: int
    repository_name: str
    default_job: str
    branch_jobs: List[dict]
    branch_patterns: List[dict]
    created_at: datetime
    updated_at: datetime

# Authentication functions
def verify_api_key(api_key: str):
    """Verify API key"""
    for key_id, key_data in api_keys.items():
        if key_data["key_hash"] == api_key and key_data["is_active"]:
            # Update last used time
            key_data["last_used"] = datetime.utcnow()
            return key_data
    return None

async def get_current_user(
    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)
):
    """Get current user (supports API key authentication)"""
    if not credentials:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Authentication token required",
            headers={"WWW-Authenticate": "Bearer"},
        )

    token = credentials.credentials

    # Verify API key
    api_key_data = verify_api_key(token)
    if api_key_data:
        return {
            "username": api_key_data["name"],
            "auth_type": "api_key",
            "role": api_key_data["role"]
        }

    # Authentication failed
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid authentication token",
        headers={"WWW-Authenticate": "Bearer"},
    )

# Public endpoints
@app.get("/health", response_model=HealthResponse)
async def health_check():
    """Health check endpoint"""
    settings = get_settings()

    return HealthResponse(
        status="healthy",
        service="Gitea Webhook Ambassador (Demo)",
        version=settings.version,
        timestamp=datetime.utcnow(),
        jenkins={"status": "connected"},
        worker_pool={
            "active_workers": 2,
            "queue_size": 0,
            "total_processed": len(trigger_logs),
            "total_failed": len([log for log in trigger_logs if log["status"] == "failed"])
        },
        database={"status": "connected"}
    )

@app.get("/")
async def root():
    """Root path"""
    return {
        "name": "Gitea Webhook Ambassador (Demo)",
        "version": "1.0.0",
        "description": "High-performance webhook service from Gitea to Jenkins - Demo Version",
        "endpoints": {
            "webhook": "/webhook/gitea",
            "health": "/health",
            "logs": "/api/logs",
            "admin": "/api/admin"
        },
        "demo_keys": {
            "admin": "demo_admin_key",
            "user": "demo_user_key"
        }
    }
@app.post("/webhook/gitea")
async def handle_gitea_webhook(request: Request):
    """Handle Gitea webhook request"""
    try:
        body = await request.body()

        # Log webhook request
        logger.info("Received Gitea webhook",
                    body_size=len(body),
                    headers=dict(request.headers))

        # Add new trigger log
        log_entry = {
            "id": len(trigger_logs) + 1,
            "repository_name": "demo-repo",
            "branch_name": "main",
            "commit_sha": "demo123",
            "job_name": "demo-job",
            "status": "success",
            "error_message": None,
            "created_at": datetime.utcnow()
        }
        trigger_logs.append(log_entry)

        return {
            "success": True,
            "message": "Webhook received successfully",
            "data": {
                "body_size": len(body),
                "timestamp": datetime.utcnow().isoformat()
            }
        }

    except Exception as e:
        logger.error("Webhook processing failed", error=str(e))
        return JSONResponse(
            status_code=500,
            content={
                "success": False,
                "message": "Webhook processing failed",
                "error": str(e)
            }
        )

# Authenticated endpoints
@app.get("/api/logs", response_model=List[TriggerLogResponse])
async def get_trigger_logs(
    repository: Optional[str] = None,
    branch: Optional[str] = None,
    limit: int = 100,
    current_user: dict = Depends(get_current_user)
):
    """Get trigger logs"""
    logger.info("Logs endpoint accessed", username=current_user["username"])

    filtered_logs = trigger_logs.copy()

    if repository:
        filtered_logs = [log for log in filtered_logs if log["repository_name"] == repository]
    if branch:
        filtered_logs = [log for log in filtered_logs if log["branch_name"] == branch]

    # Sort by time descending and limit
    filtered_logs.sort(key=lambda x: x["created_at"], reverse=True)
    return filtered_logs[:limit]

@app.get("/api/logs/stats")
async def get_log_stats(current_user: dict = Depends(get_current_user)):
    """Get log statistics"""
    logger.info("Log statistics accessed", username=current_user["username"])

    total_logs = len(trigger_logs)
    successful_logs = len([log for log in trigger_logs if log["status"] == "success"])
    failed_logs = len([log for log in trigger_logs if log["status"] == "failed"])

    # Logs in the last 24 hours
    yesterday = datetime.utcnow() - timedelta(days=1)
    recent_logs = len([log for log in trigger_logs if log["created_at"] >= yesterday])

    # Grouped by repository
    repo_stats = {}
    for log in trigger_logs:
        repo = log["repository_name"]
        repo_stats[repo] = repo_stats.get(repo, 0) + 1

    return {
        "total_logs": total_logs,
        "successful_logs": successful_logs,
        "failed_logs": failed_logs,
        "recent_logs_24h": recent_logs,
        "repository_stats": [
            {"repository": repo, "count": count}
            for repo, count in repo_stats.items()
        ]
    }

@app.get("/api/admin/api-keys", response_model=List[APIKeyResponse])
async def list_api_keys(current_user: dict = Depends(get_current_user)):
    """List all API keys (admin only)"""
    if current_user["role"] != "admin":
        raise HTTPException(status_code=403, detail="Admin privileges required")

    logger.info("API key list viewed", username=current_user["username"])

    return [
        APIKeyResponse(
            id=key_id,
            name=key_data["name"],
            key_prefix=key_data["key_prefix"],
            created_at=key_data["created_at"],
            last_used=key_data["last_used"],
            is_active=key_data["is_active"],
            role=key_data["role"]
        )
        for key_id, key_data in api_keys.items()
    ]

@app.get("/api/admin/projects", response_model=List[ProjectMappingResponse])
async def list_project_mappings(current_user: dict = Depends(get_current_user)):
    """List all project mappings"""
    logger.info("Project mappings viewed", username=current_user["username"])

    return [
        ProjectMappingResponse(
            id=mapping_id,
            repository_name=mapping_data["repository_name"],
            default_job=mapping_data["default_job"],
            branch_jobs=mapping_data["branch_jobs"],
            branch_patterns=mapping_data["branch_patterns"],
            created_at=mapping_data["created_at"],
            updated_at=mapping_data["updated_at"]
        )
        for mapping_id, mapping_data in project_mappings.items()
    ]

@app.get("/api/admin/stats")
async def get_admin_stats(current_user: dict = Depends(get_current_user)):
    """Get admin statistics"""
    logger.info("Admin statistics viewed", username=current_user["username"])

    total_keys = len(api_keys)
    active_keys = len([key for key in api_keys.values() if key["is_active"]])

    # Recently used keys
    week_ago = datetime.utcnow() - timedelta(days=7)
    recent_keys = len([
        key for key in api_keys.values()
        if key["last_used"] >= week_ago
    ])

    total_mappings = len(project_mappings)

    return {
        "api_keys": {
            "total": total_keys,
            "active": active_keys,
            "recently_used": recent_keys
        },
        "project_mappings": {
            "total": total_mappings
        }
    }

# Middleware
@app.middleware("http")
async def log_requests(request: Request, call_next):
    """Request logging middleware"""
    start_time = datetime.utcnow()

    response = await call_next(request)

    process_time = (datetime.utcnow() - start_time).total_seconds()
    response.headers["X-Process-Time"] = str(process_time)

    return response

if __name__ == "__main__":
    import uvicorn
    settings = get_settings()

    print("🚀 Starting Gitea Webhook Ambassador Demo Version")
    print("=" * 60)
    print("📋 Demo API Keys:")
    print("   Admin key: demo_admin_key")
    print("   User key: demo_user_key")
    print()
    print("🔧 Usage examples:")
    print("   curl -H 'Authorization: Bearer demo_admin_key' http://localhost:8000/api/admin/api-keys")
    print("   curl -H 'Authorization: Bearer demo_user_key' http://localhost:8000/api/logs")
    print("=" * 60)

    uvicorn.run(
        "app.main_demo:app",
        host=settings.server.host,
        port=settings.server.port,
        reload=settings.server.reload
    )
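Because the demo keys and role checks are all in-process, the role gate above can be verified without a running server; a minimal sketch with FastAPI's TestClient:

from fastapi.testclient import TestClient

from app.main_demo import app

client = TestClient(app)

# Admin-only endpoint succeeds with the admin demo key...
ok = client.get("/api/admin/api-keys",
                headers={"Authorization": "Bearer demo_admin_key"})
assert ok.status_code == 200

# ...and is rejected for the user-role key
denied = client.get("/api/admin/api-keys",
                    headers={"Authorization": "Bearer demo_user_key"})
assert denied.status_code == 403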
@ -1,515 +0,0 @@
from fastapi import FastAPI, Request, Depends, HTTPException, status, Query
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy.orm import Session
import os
import time
import psutil
from datetime import datetime, timedelta

# Import database models
from app.models.database import create_tables, get_db, APIKey, ProjectMapping, TriggerLog
from app.auth.middleware import auth_middleware, get_current_user
from app.config import settings

# Create FastAPI app
app = FastAPI(
    title="Gitea Webhook Ambassador",
    description="High-performance Gitea to Jenkins Webhook service",
    version="2.0.0"
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Create database tables
create_tables()

# Mount static files
app.mount("/static", StaticFiles(directory="app/static"), name="static")

# Set up templates
templates = Jinja2Templates(directory="app/templates")

# Startup time
start_time = datetime.now()

@app.get("/", response_class=HTMLResponse)
async def root(request: Request):
    """Root path - redirect to login page"""
    return RedirectResponse(url="/login")

@app.get("/login", response_class=HTMLResponse)
async def login_page(request: Request):
    """Login page"""
    return templates.TemplateResponse("login.html", {"request": request})

@app.get("/dashboard", response_class=HTMLResponse)
async def dashboard_page(request: Request):
    """Dashboard page"""
    return templates.TemplateResponse("dashboard.html", {"request": request})

@app.post("/api/auth/login")
async def login(request: dict):
    """Admin login"""
    admin_key = os.getenv("ADMIN_SECRET_KEY", "admin-secret-key-change-in-production")

    if request.get("secret_key") != admin_key:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid secret key"
        )

    # Generate JWT token
    token = auth_middleware.create_access_token(
        data={"sub": "admin", "role": "admin"}
    )

    return {"token": token}

@app.get("/api/stats")
async def get_stats(db: Session = Depends(get_db), current_user: dict = Depends(get_current_user)):
    """Get statistics"""
    try:
        # Get total number of projects
        total_projects = db.query(ProjectMapping).count()

        # Get total number of API keys
        total_api_keys = db.query(APIKey).count()

        # Get today's trigger count
        today = datetime.now().date()
        today_triggers = db.query(TriggerLog).filter(
            TriggerLog.created_at >= today
        ).count()

        # Get successful trigger count
        successful_triggers = db.query(TriggerLog).filter(
            TriggerLog.status == "success"
        ).count()

        return {
            "total_projects": total_projects,
            "total_api_keys": total_api_keys,
            "today_triggers": today_triggers,
            "successful_triggers": successful_triggers
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get statistics: {str(e)}")
@app.get("/api/keys", response_model=dict)
|
||||
async def list_api_keys(db: Session = Depends(get_db), current_user: dict = Depends(get_current_user)):
|
||||
"""Get all API keys (frontend compatible)"""
|
||||
try:
|
||||
keys = db.query(APIKey).order_by(APIKey.created_at.desc()).all()
|
||||
return {
|
||||
"keys": [
|
||||
{
|
||||
"id": key.id,
|
||||
"key": key.key,
|
||||
"description": key.description,
|
||||
"created_at": key.created_at.isoformat()
|
||||
}
|
||||
for key in keys
|
||||
]
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get API keys: {str(e)}")
|
||||
|
||||
@app.post("/api/keys", response_model=dict)
|
||||
async def create_api_key(
|
||||
request: dict,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user)
|
||||
):
|
||||
"""Create a new API key (frontend compatible)"""
|
||||
try:
|
||||
# Generate new API key
|
||||
api_key_value = auth_middleware.generate_api_key()
|
||||
|
||||
# Save to database
|
||||
db_key = APIKey(
|
||||
key=api_key_value,
|
||||
description=request.get("description", "")
|
||||
)
|
||||
|
||||
db.add(db_key)
|
||||
db.commit()
|
||||
db.refresh(db_key)
|
||||
|
||||
return {
|
||||
"id": db_key.id,
|
||||
"key": db_key.key,
|
||||
"description": db_key.description,
|
||||
"created_at": db_key.created_at.isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to create API key: {str(e)}")
|
||||
|
||||
@app.delete("/api/keys/{key_id}")
|
||||
async def delete_api_key(
|
||||
key_id: int,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user)
|
||||
):
|
||||
"""Delete API key (frontend compatible)"""
|
||||
try:
|
||||
key = db.query(APIKey).filter(APIKey.id == key_id).first()
|
||||
|
||||
if not key:
|
||||
raise HTTPException(status_code=404, detail="API key does not exist")
|
||||
|
||||
db.delete(key)
|
||||
db.commit()
|
||||
|
||||
return {"message": "API key deleted successfully"}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to delete API key: {str(e)}")
|
||||
|
||||
@app.get("/api/projects/", response_model=dict)
|
||||
async def list_projects(db: Session = Depends(get_db), current_user: dict = Depends(get_current_user)):
|
||||
"""Get all projects (frontend compatible)"""
|
||||
try:
|
||||
projects = db.query(ProjectMapping).order_by(ProjectMapping.created_at.desc()).all()
|
||||
return {
|
||||
"projects": [
|
||||
{
|
||||
"id": project.id,
|
||||
"name": project.repository_name.split('/')[-1],
|
||||
"jenkinsJob": project.default_job,
|
||||
"giteaRepo": project.repository_name,
|
||||
"created_at": project.created_at.isoformat()
|
||||
}
|
||||
for project in projects
|
||||
]
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get project list: {str(e)}")
|
||||
|
||||
@app.post("/api/projects/", response_model=dict)
|
||||
async def create_project(
|
||||
request: dict,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user)
|
||||
):
|
||||
"""Create a new project (frontend compatible)"""
|
||||
try:
|
||||
# Check if project already exists
|
||||
existing_project = db.query(ProjectMapping).filter(
|
||||
ProjectMapping.repository_name == request["giteaRepo"]
|
||||
).first()
|
||||
|
||||
if existing_project:
|
||||
raise HTTPException(status_code=400, detail="Project already exists")
|
||||
|
||||
# Create new project
|
||||
project = ProjectMapping(
|
||||
repository_name=request["giteaRepo"],
|
||||
default_job=request["jenkinsJob"]
|
||||
)
|
||||
|
||||
db.add(project)
|
||||
db.commit()
|
||||
db.refresh(project)
|
||||
|
||||
return {
|
||||
"id": project.id,
|
||||
"name": request["name"],
|
||||
"jenkinsJob": project.default_job,
|
||||
"giteaRepo": project.repository_name,
|
||||
"created_at": project.created_at.isoformat()
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to create project: {str(e)}")
|
||||
|
||||
@app.delete("/api/projects/{project_id}")
|
||||
async def delete_project(
|
||||
project_id: int,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user)
|
||||
):
|
||||
"""Delete project (frontend compatible)"""
|
||||
try:
|
||||
project = db.query(ProjectMapping).filter(ProjectMapping.id == project_id).first()
|
||||
|
||||
if not project:
|
||||
raise HTTPException(status_code=404, detail="Project does not exist")
|
||||
|
||||
db.delete(project)
|
||||
db.commit()
|
||||
|
||||
return {"message": "Project deleted successfully"}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to delete project: {str(e)}")
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""Health check endpoint with 'service', 'jenkins', and 'worker_pool' fields for compatibility"""
|
||||
try:
|
||||
# Calculate uptime
|
||||
uptime = datetime.now() - start_time
|
||||
uptime_str = str(uptime).split('.')[0] # Remove microseconds
|
||||
# Get memory usage
|
||||
process = psutil.Process()
|
||||
memory_info = process.memory_info()
|
||||
memory_mb = memory_info.rss / 1024 / 1024
|
||||
return {
|
||||
"status": "healthy",
|
||||
"service": "gitea-webhook-ambassador-python",
|
||||
"version": "2.0.0",
|
||||
"uptime": uptime_str,
|
||||
"memory": f"{memory_mb:.1f} MB",
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"jenkins": {
|
||||
"status": "healthy",
|
||||
"message": "Jenkins connection mock"
|
||||
},
|
||||
"worker_pool": {
|
||||
"active_workers": 1,
|
||||
"queue_size": 0,
|
||||
"total_processed": 0,
|
||||
"total_failed": 0
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
"status": "unhealthy",
|
||||
"service": "gitea-webhook-ambassador-python",
|
||||
"error": str(e),
|
||||
"timestamp": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
@app.get("/api/logs")
|
||||
async def get_logs(
|
||||
startTime: str = None,
|
||||
endTime: str = None,
|
||||
level: str = None,
|
||||
query: str = None,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user)
|
||||
):
|
||||
"""Get logs (simplified version)"""
|
||||
try:
|
||||
# Here should be the real log query logic
|
||||
# Currently returns mock data
|
||||
logs = [
|
||||
{
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"level": "info",
|
||||
"message": "System running normally"
|
||||
}
|
||||
]
|
||||
|
||||
return {"logs": logs}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get logs: {str(e)}")
|
||||
|
||||
# --- Minimal Go-version-compatible endpoints ---
from fastapi import Response

@app.post("/webhook/gitea")
async def webhook_gitea(request: Request):
    """Minimal Gitea webhook endpoint (mock, with 'data' field for compatibility)"""
    body = await request.body()
    # TODO: Replace with real webhook processing logic
    return {
        "success": True,
        "message": "Webhook received (mock)",
        "data": {
            "body_size": len(body)
        }
    }

@app.get("/metrics")
async def metrics_endpoint():
    """Minimal Prometheus metrics endpoint (mock)"""
    # TODO: Replace with real Prometheus metrics
    return Response(
        content="# HELP webhook_requests_total Total number of webhook requests\nwebhook_requests_total 0\n",
        media_type="text/plain"
    )

@app.get("/health/queue")
async def health_queue():
    """Minimal queue health endpoint (mock)"""
    # TODO: Replace with real queue stats
    return {
        "status": "healthy",
        "queue_stats": {
            "active_tasks": 0,
            "queued_tasks": 0,
            "worker_count": 1,
            "total_queue_length": 0
        }
    }
# --- End minimal endpoints ---

# Additional endpoints for enhanced test compatibility
@app.post("/api/admin/api-keys")
async def create_admin_api_key(
    request: dict,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Create API key (enhanced test compatible)"""
    try:
        if "name" not in request:
            raise HTTPException(status_code=400, detail="API key name is required")

        # Generate a random API key
        import secrets
        api_key_value = secrets.token_urlsafe(32)

        api_key = APIKey(
            key=api_key_value,
            description=request["name"]
        )

        db.add(api_key)
        db.commit()
        db.refresh(api_key)

        return {
            "id": api_key.id,
            "name": api_key.description,
            "key": api_key.key,
            "description": api_key.description,
            "created_at": api_key.created_at.isoformat(),
            "updated_at": api_key.updated_at.isoformat()
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to create API key: {str(e)}")

@app.delete("/api/admin/api-keys/{key_id}")
async def delete_admin_api_key_by_id(
    key_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Delete API key by ID (enhanced test compatible)"""
    try:
        api_key = db.query(APIKey).filter(APIKey.id == key_id).first()

        if not api_key:
            raise HTTPException(status_code=404, detail="API key not found")

        db.delete(api_key)
        db.commit()

        return {"message": "API key deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to delete API key: {str(e)}")

@app.post("/api/admin/projects")
async def create_admin_project(
    request: dict,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Create project mapping (enhanced test compatible)"""
    try:
        if "repository_name" not in request:
            raise HTTPException(status_code=400, detail="Repository name is required")

        # Check if project already exists
        existing_project = db.query(ProjectMapping).filter(
            ProjectMapping.repository_name == request["repository_name"]
        ).first()

        if existing_project:
            raise HTTPException(status_code=400, detail="Project mapping already exists")

        # Create new project mapping
        project = ProjectMapping(
            repository_name=request["repository_name"],
            default_job=request.get("default_job", "")
        )

        db.add(project)
        db.commit()
        db.refresh(project)

        return {
            "id": project.id,
            "repository_name": project.repository_name,
            "default_job": project.default_job,
            "branch_jobs": request.get("branch_jobs", []),
            "branch_patterns": request.get("branch_patterns", []),
            "created_at": project.created_at.isoformat(),
            "updated_at": project.updated_at.isoformat()
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to create project mapping: {str(e)}")

@app.get("/api/logs/stats")
async def get_logs_stats(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get logs statistics (enhanced test compatible)"""
    try:
        # Mock statistics for demo
        stats = {
            "total_logs": 150,
            "successful_logs": 145,
            "failed_logs": 5,
            "recent_logs_24h": 25,
            "repository_stats": [
                {"repository": "freeleaps/test-project", "count": 50},
                {"repository": "freeleaps/another-project", "count": 30}
            ]
        }

        return stats
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get log statistics: {str(e)}")

@app.get("/api/admin/stats")
async def get_admin_stats(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get admin statistics (enhanced test compatible)"""
    try:
        # Get real statistics from database
        total_api_keys = db.query(APIKey).count()
        total_projects = db.query(ProjectMapping).count()

        stats = {
            "api_keys": {
                "total": total_api_keys,
                "active": total_api_keys,
                "recently_used": total_api_keys
            },
            "project_mappings": {
                "total": total_projects
            }
        }

        return stats
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get admin statistics: {str(e)}")

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
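A sketch of the login-then-call flow against this web-UI backend, assuming it is running locally with the default ADMIN_SECRET_KEY shown above and that get_current_user reads a standard Bearer header (the header scheme is an assumption, since auth.middleware is not part of this diff):

import requests

BASE = "http://localhost:8000"

# Exchange the admin secret for a JWT
login = requests.post(f"{BASE}/api/auth/login",
                      json={"secret_key": "admin-secret-key-change-in-production"})
login.raise_for_status()
token = login.json()["token"]

# Use the token for the authenticated dashboard APIs
headers = {"Authorization": f"Bearer {token}"}
print(requests.get(f"{BASE}/api/stats", headers=headers).json())
print(requests.get(f"{BASE}/api/keys", headers=headers).json())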
@ -1,145 +0,0 @@
"""
Simplified FastAPI application entry point
For quick start and testing
"""

from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import structlog
from datetime import datetime

from app.config import get_settings
from app.handlers.webhook import router as webhook_router
from app.handlers.health import router as health_router
from app.handlers.logs import router as logs_router
from app.handlers.admin import router as admin_router

# Configure logging
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer()
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

logger = structlog.get_logger()

# Create FastAPI application
app = FastAPI(
    title="Gitea Webhook Ambassador",
    description="High-performance webhook service from Gitea to Jenkins",
    version="1.0.0"
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(webhook_router)
app.include_router(health_router)
app.include_router(logs_router)
app.include_router(admin_router)

@app.get("/")
async def root():
    """Root path"""
    return {
        "name": "Gitea Webhook Ambassador",
        "version": "1.0.0",
        "description": "High-performance webhook service from Gitea to Jenkins",
        "endpoints": {
            "webhook": "/webhook/gitea",
            "health": "/health",
            "metrics": "/metrics",
            "logs": "/api/logs",
            "admin": "/api/admin"
        }
    }

@app.middleware("http")
async def log_requests(request: Request, call_next):
    """Request logging middleware"""
    start_time = datetime.utcnow()

    # Log request
    logger.info(
        "Request started",
        method=request.method,
        url=str(request.url),
        client_ip=request.client.host if request.client else None
    )

    # Process request
    response = await call_next(request)

    # Calculate processing time
    process_time = (datetime.utcnow() - start_time).total_seconds()

    # Log response
    logger.info(
        "Request completed",
        method=request.method,
        url=str(request.url),
        status_code=response.status_code,
        process_time=process_time
    )

    # Add processing time to response header
    response.headers["X-Process-Time"] = str(process_time)

    return response

@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Global exception handler"""
    logger.error(
        "Unhandled exception",
        method=request.method,
        url=str(request.url),
        error=str(exc),
        exc_info=True
    )

    return JSONResponse(
        status_code=500,
        content={
            "error": "Internal server error",
            "message": "An unexpected error occurred"
        }
    )

if __name__ == "__main__":
    import uvicorn
    settings = get_settings()

    logger.info(
        "Starting Gitea Webhook Ambassador",
        host=settings.server.host,
        port=settings.server.port,
        version=settings.version
    )

    uvicorn.run(
        "app.main_simple:app",
        host=settings.server.host,
        port=settings.server.port,
        reload=settings.server.reload
    )
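The timing middleware above is easy to verify in isolation; a minimal sketch with TestClient, assuming the routers this module mounts import cleanly in the test environment:

from fastapi.testclient import TestClient

from app.main_simple import app

client = TestClient(app)
resp = client.get("/")

# The logging middleware stamps every response with its processing time
assert "X-Process-Time" in resp.headers
print("handled in", resp.headers["X-Process-Time"], "seconds")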
@ -1,20 +0,0 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, Boolean
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class APIKey(Base):
    """API key model"""
    __tablename__ = "api_keys"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(255), nullable=False)
    key_hash = Column(String(255), nullable=False, unique=True, index=True)
    key_prefix = Column(String(8), nullable=False, index=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    last_used = Column(DateTime, default=datetime.utcnow, nullable=False)
    is_active = Column(Boolean, default=True, nullable=False, index=True)

    def __repr__(self):
        return f"<APIKey(id={self.id}, name={self.name}, prefix={self.key_prefix}, active={self.is_active})>"
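This model stores only a digest and an 8-character display prefix, which implies key material is generated and hashed before insert. One plausible way to populate those columns; the SHA-256 scheme here is an assumption for illustration, not taken from this commit:

import hashlib
import secrets
from typing import Tuple

def mint_api_key() -> Tuple[str, str, str]:
    """Return (plaintext_key, key_hash, key_prefix) for a new APIKey row.

    The plaintext is shown to the caller once; only the digest and the
    display prefix are persisted, matching the columns above. The exact
    hashing scheme is hypothetical.
    """
    plaintext = secrets.token_urlsafe(32)
    key_hash = hashlib.sha256(plaintext.encode()).hexdigest()
    key_prefix = plaintext[:8]  # fits the String(8) column
    return plaintext, key_hash, key_prefix

plaintext, key_hash, key_prefix = mint_api_key()
print(key_prefix, key_hash[:12], "...")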
@ -1,92 +0,0 @@
from sqlalchemy import create_engine, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.sql import func
from datetime import datetime
import os

Base = declarative_base()

class APIKey(Base):
    __tablename__ = 'api_keys'

    id = Column(Integer, primary_key=True, index=True)
    key = Column(String(255), unique=True, index=True, nullable=False)
    description = Column(String(255), nullable=True)
    created_at = Column(DateTime, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())

class ProjectMapping(Base):
    __tablename__ = 'project_mappings'

    id = Column(Integer, primary_key=True, index=True)
    repository_name = Column(String(255), unique=True, index=True, nullable=False)
    default_job = Column(String(255), nullable=False)
    created_at = Column(DateTime, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())

    # Relationships
    branch_jobs = relationship("BranchJob", back_populates="project", cascade="all, delete-orphan")
    branch_patterns = relationship("BranchPattern", back_populates="project", cascade="all, delete-orphan")

class BranchJob(Base):
    __tablename__ = 'branch_jobs'

    id = Column(Integer, primary_key=True, index=True)
    project_id = Column(Integer, ForeignKey('project_mappings.id'), nullable=False)
    branch_name = Column(String(255), nullable=False)
    job_name = Column(String(255), nullable=False)
    created_at = Column(DateTime, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())

    # Relationship
    project = relationship("ProjectMapping", back_populates="branch_jobs")

class BranchPattern(Base):
    __tablename__ = 'branch_patterns'

    id = Column(Integer, primary_key=True, index=True)
    project_id = Column(Integer, ForeignKey('project_mappings.id'), nullable=False)
    pattern = Column(String(255), nullable=False)
    job_name = Column(String(255), nullable=False)
    created_at = Column(DateTime, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())

    # Relationship
    project = relationship("ProjectMapping", back_populates="branch_patterns")

class TriggerLog(Base):
    __tablename__ = 'trigger_logs'

    id = Column(Integer, primary_key=True, index=True)
    repository_name = Column(String(255), nullable=False)
    branch_name = Column(String(255), nullable=False)
    commit_sha = Column(String(255), nullable=False)
    job_name = Column(String(255), nullable=False)
    status = Column(String(50), nullable=False)  # success, failed, pending
    error_message = Column(Text, nullable=True)
    created_at = Column(DateTime, default=func.now())

# Database configuration
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./gitea_webhook_ambassador.db")

# Create engine
engine = create_engine(
    DATABASE_URL,
    connect_args={"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {}
)

# Create session
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Create tables
def create_tables():
    Base.metadata.create_all(bind=engine)

# Get database session
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
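The generator form of get_db is the FastAPI dependency-injection pattern: the framework advances the generator once to obtain the session and resumes it after the response so the finally block closes it. A short usage sketch — the route and its names are illustrative, not taken from this repository:

from fastapi import Depends, FastAPI
from sqlalchemy.orm import Session

app = FastAPI()

@app.get("/mappings/{repo}")
def read_mapping(repo: str, db: Session = Depends(get_db)):
    # The session is opened by get_db and closed after the response is sent
    return db.query(ProjectMapping).filter(ProjectMapping.repository_name == repo).first()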
@ -1,130 +0,0 @@
"""
Gitea Webhook data model
"""

from typing import List, Optional
from pydantic import BaseModel, Field
from datetime import datetime


class User(BaseModel):
    """Gitea user model"""
    id: int
    login: str
    full_name: Optional[str] = None
    email: Optional[str] = None
    username: Optional[str] = None


class Commit(BaseModel):
    """Git commit model"""
    id: str
    message: str
    url: str
    author: User
    timestamp: Optional[datetime] = None


class Repository(BaseModel):
    """Git repository model"""
    id: int
    name: str
    owner: User
    full_name: str
    private: bool = False
    clone_url: str
    ssh_url: Optional[str] = None
    html_url: str
    default_branch: str = "main"


class GiteaWebhook(BaseModel):
    """Gitea Webhook model"""
    secret: Optional[str] = None
    ref: str
    before: str
    after: str
    compare_url: Optional[str] = None
    commits: List[Commit] = Field(default_factory=list)
    repository: Repository
    pusher: User

    def get_branch_name(self) -> str:
        """Extract branch name from ref"""
        prefix = "refs/heads/"
        if self.ref.startswith(prefix):
            return self.ref[len(prefix):]
        return self.ref

    def get_event_id(self) -> str:
        """Generate unique event ID"""
        return f"{self.repository.full_name}-{self.after}"

    def get_commit_hash(self) -> str:
        """Get commit hash"""
        return self.after

    def get_deduplication_key(self) -> str:
        """Generate deduplication key"""
        branch = self.get_branch_name()
        return f"{self.after}:{branch}"

    def is_push_event(self) -> bool:
        """Determine if it is a push event"""
        return self.ref.startswith("refs/heads/")

    def is_tag_event(self) -> bool:
        """Determine if it is a tag event"""
        return self.ref.startswith("refs/tags/")

    def get_commit_message(self) -> str:
        """Get commit message"""
        if self.commits:
            return self.commits[0].message
        return ""

    def get_author_info(self) -> dict:
        """Get author information"""
        if self.commits:
            author = self.commits[0].author
            return {
                "name": author.full_name or author.login,
                "email": author.email,
                "username": author.login
            }
        return {
            "name": self.pusher.full_name or self.pusher.login,
            "email": self.pusher.email,
            "username": self.pusher.login
        }


class WebhookEvent(BaseModel):
    """Webhook event model"""
    id: str
    repository: str
    branch: str
    commit_hash: str
    event_type: str
    timestamp: datetime
    payload: dict

    class Config:
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }


class WebhookResponse(BaseModel):
    """Webhook response model"""
    success: bool
    message: str
    event_id: Optional[str] = None
    job_name: Optional[str] = None
    environment: Optional[str] = None
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    class Config:
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }
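For reference, the model can be exercised against a minimal push payload; all field values below are made up for illustration:

payload = {
    "ref": "refs/heads/feature/login",
    "before": "0" * 40,
    "after": "a" * 40,
    "repository": {
        "id": 1, "name": "demo", "full_name": "acme/demo",
        "owner": {"id": 1, "login": "acme"},
        "clone_url": "https://gitea.example.com/acme/demo.git",
        "html_url": "https://gitea.example.com/acme/demo",
    },
    "pusher": {"id": 2, "login": "alice"},
}
hook = GiteaWebhook(**payload)
assert hook.is_push_event()
assert hook.get_branch_name() == "feature/login"
assert hook.get_deduplication_key() == "a" * 40 + ":feature/login"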
@ -1,20 +0,0 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, JSON
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class ProjectMapping(Base):
    """Project mapping model"""
    __tablename__ = "project_mappings"

    id = Column(Integer, primary_key=True, index=True)
    repository_name = Column(String(255), nullable=False, unique=True, index=True)
    default_job = Column(String(255), nullable=False)
    branch_jobs = Column(JSON, nullable=False, default=list)  # [{"branch": "dev", "job": "dev-build"}]
    branch_patterns = Column(JSON, nullable=False, default=list)  # [{"pattern": "feature/*", "job": "feature-build"}]
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    def __repr__(self):
        return f"<ProjectMapping(id={self.id}, repository={self.repository_name}, default_job={self.default_job})>"
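The JSON columns keep the whole mapping in a single row, following the shapes in the inline comments above. A typical record would therefore look like this (all values illustrative):

mapping = ProjectMapping(
    repository_name="acme/demo",
    default_job="demo-build",
    branch_jobs=[{"branch": "develop", "job": "demo-dev-build"}],
    branch_patterns=[{"pattern": "release/*", "job": "demo-release-build"}],
)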
@ -1,22 +0,0 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, Text
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class TriggerLog(Base):
    """Trigger log model"""
    __tablename__ = "trigger_logs"

    id = Column(Integer, primary_key=True, index=True)
    repository_name = Column(String(255), nullable=False, index=True)
    branch_name = Column(String(255), nullable=False, index=True)
    commit_sha = Column(String(64), nullable=False)
    job_name = Column(String(255), nullable=False)
    status = Column(String(50), nullable=False, index=True)  # success, failed, pending
    error_message = Column(Text, nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    def __repr__(self):
        return f"<TriggerLog(id={self.id}, repository={self.repository_name}, branch={self.branch_name}, status={self.status})>"
@ -1,380 +0,0 @@
"""
Database service
Implements project mapping, branch pattern matching, and related features
"""

import asyncio
from typing import Optional, List, Dict, Any
from datetime import datetime
import structlog
import re
from sqlalchemy import create_engine, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.sql import text

from app.config import get_settings

logger = structlog.get_logger()
Base = declarative_base()

# Database models
class APIKey(Base):
    """API key model"""
    __tablename__ = "api_keys"

    id = Column(Integer, primary_key=True, autoincrement=True)
    key = Column(String(255), unique=True, nullable=False)
    description = Column(Text)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

class ProjectMapping(Base):
    """Project mapping model"""
    __tablename__ = "project_mappings"

    id = Column(Integer, primary_key=True, autoincrement=True)
    repository_name = Column(String(255), unique=True, nullable=False)
    default_job = Column(String(255))
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationships
    branch_jobs = relationship("BranchJob", back_populates="project", cascade="all, delete-orphan")
    branch_patterns = relationship("BranchPattern", back_populates="project", cascade="all, delete-orphan")

class BranchJob(Base):
    """Branch job mapping model"""
    __tablename__ = "branch_jobs"

    id = Column(Integer, primary_key=True, autoincrement=True)
    project_id = Column(Integer, ForeignKey("project_mappings.id"), nullable=False)
    branch_name = Column(String(255), nullable=False)
    job_name = Column(String(255), nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationship
    project = relationship("ProjectMapping", back_populates="branch_jobs")

class BranchPattern(Base):
    """Branch pattern mapping model"""
    __tablename__ = "branch_patterns"

    id = Column(Integer, primary_key=True, autoincrement=True)
    project_id = Column(Integer, ForeignKey("project_mappings.id"), nullable=False)
    pattern = Column(String(255), nullable=False)
    job_name = Column(String(255), nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationship
    project = relationship("ProjectMapping", back_populates="branch_patterns")

class TriggerLog(Base):
    """Trigger log model"""
    __tablename__ = "trigger_logs"

    id = Column(Integer, primary_key=True, autoincrement=True)
    repository_name = Column(String(255), nullable=False)
    branch_name = Column(String(255), nullable=False)
    commit_sha = Column(String(255), nullable=False)
    job_name = Column(String(255), nullable=False)
    status = Column(String(50), nullable=False)
    error_message = Column(Text)
    created_at = Column(DateTime, default=datetime.utcnow)

class DatabaseService:
    """Database service"""

    def __init__(self):
        self.settings = get_settings()
        self.engine = None
        self.SessionLocal = None
        self._init_database()

    def _init_database(self):
        """Initialize database"""
        try:
            self.engine = create_engine(
                self.settings.database.url,
                echo=self.settings.database.echo,
                pool_size=self.settings.database.pool_size,
                max_overflow=self.settings.database.max_overflow
            )

            # Create tables
            Base.metadata.create_all(bind=self.engine)

            # Create session factory
            self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)

            logger.info("Database initialized successfully")

        except Exception as e:
            logger.error("Failed to initialize database", error=str(e))
            raise

    def get_session(self):
        """Get database session"""
        return self.SessionLocal()

    async def get_project_mapping(self, repository_name: str) -> Optional[Dict[str, Any]]:
        """
        Get project mapping
        Args:
            repository_name: repository name
        Returns:
            Dict: project mapping info
        """
        try:
            def _get_mapping():
                session = self.get_session()
                try:
                    project = session.query(ProjectMapping).filter(
                        ProjectMapping.repository_name == repository_name
                    ).first()

                    if not project:
                        return None

                    # Build return data
                    result = {
                        "id": project.id,
                        "repository_name": project.repository_name,
                        "default_job": project.default_job,
                        "branch_jobs": [],
                        "branch_patterns": []
                    }

                    # Add branch job mappings
                    for branch_job in project.branch_jobs:
                        result["branch_jobs"].append({
                            "id": branch_job.id,
                            "branch_name": branch_job.branch_name,
                            "job_name": branch_job.job_name
                        })

                    # Add branch pattern mappings
                    for pattern in project.branch_patterns:
                        result["branch_patterns"].append({
                            "id": pattern.id,
                            "pattern": pattern.pattern,
                            "job_name": pattern.job_name
                        })

                    return result

                finally:
                    session.close()

            # Run DB operation in thread pool
            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(None, _get_mapping)

        except Exception as e:
            logger.error("Failed to get project mapping",
                         repository_name=repository_name, error=str(e))
            return None

    async def determine_job_name(self, repository_name: str, branch_name: str) -> Optional[str]:
        """
        Determine job name by branch
        Args:
            repository_name: repository name
            branch_name: branch name
        Returns:
            str: job name
        """
        try:
            project = await self.get_project_mapping(repository_name)
            if not project:
                return None

            # 1. Check exact branch match
            for branch_job in project["branch_jobs"]:
                if branch_job["branch_name"] == branch_name:
                    logger.debug("Found exact branch match",
                                 branch=branch_name, job=branch_job["job_name"])
                    return branch_job["job_name"]

            # 2. Check pattern match
            for pattern in project["branch_patterns"]:
                try:
                    if re.match(pattern["pattern"], branch_name):
                        logger.debug("Branch matched pattern",
                                     branch=branch_name, pattern=pattern["pattern"],
                                     job=pattern["job_name"])
                        return pattern["job_name"]
                except re.error as e:
                    logger.error("Invalid regex pattern",
                                 pattern=pattern["pattern"], error=str(e))
                    continue

            # 3. Use default job
            if project["default_job"]:
                logger.debug("Using default job",
                             branch=branch_name, job=project["default_job"])
                return project["default_job"]

            return None

        except Exception as e:
            logger.error("Failed to determine job name",
                         repository_name=repository_name, branch_name=branch_name,
                         error=str(e))
            return None

    async def log_trigger(self, log_data: Dict[str, Any]) -> bool:
        """
        Log trigger event
        Args:
            log_data: log data
        Returns:
            bool: success or not
        """
        try:
            def _log_trigger():
                session = self.get_session()
                try:
                    log = TriggerLog(
                        repository_name=log_data["repository_name"],
                        branch_name=log_data["branch_name"],
                        commit_sha=log_data["commit_sha"],
                        job_name=log_data["job_name"],
                        status=log_data["status"],
                        error_message=log_data.get("error_message")
                    )

                    session.add(log)
                    session.commit()
                    return True

                except Exception as e:
                    session.rollback()
                    logger.error("Failed to log trigger", error=str(e))
                    return False
                finally:
                    session.close()

            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(None, _log_trigger)

        except Exception as e:
            logger.error("Failed to log trigger", error=str(e))
            return False

    async def get_trigger_logs(self, repository_name: Optional[str] = None,
                               branch_name: Optional[str] = None, limit: int = 100) -> List[Dict[str, Any]]:
        """
        Get trigger logs
        Args:
            repository_name: repository name (optional)
            branch_name: branch name (optional)
            limit: maximum number of rows
        Returns:
            List: log list
        """
        try:
            def _get_logs():
                session = self.get_session()
                try:
                    query = session.query(TriggerLog)

                    if repository_name:
                        query = query.filter(TriggerLog.repository_name == repository_name)

                    if branch_name:
                        query = query.filter(TriggerLog.branch_name == branch_name)

                    logs = query.order_by(TriggerLog.created_at.desc()).limit(limit).all()

                    return [
                        {
                            "id": log.id,
                            "repository_name": log.repository_name,
                            "branch_name": log.branch_name,
                            "commit_sha": log.commit_sha,
                            "job_name": log.job_name,
                            "status": log.status,
                            "error_message": log.error_message,
                            "created_at": log.created_at.isoformat()
                        }
                        for log in logs
                    ]

                finally:
                    session.close()

            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(None, _get_logs)

        except Exception as e:
            logger.error("Failed to get trigger logs", error=str(e))
            return []

    async def create_project_mapping(self, mapping_data: Dict[str, Any]) -> bool:
        """
        Create project mapping
        Args:
            mapping_data: mapping data
        Returns:
            bool: success or not
        """
        try:
            def _create_mapping():
                session = self.get_session()
                try:
                    # Create project mapping
                    project = ProjectMapping(
                        repository_name=mapping_data["repository_name"],
                        default_job=mapping_data.get("default_job")
                    )

                    session.add(project)
                    session.flush()  # Get ID

                    # Add branch job mappings
                    for branch_job in mapping_data.get("branch_jobs", []):
                        job = BranchJob(
                            project_id=project.id,
                            branch_name=branch_job["branch_name"],
                            job_name=branch_job["job_name"]
                        )
                        session.add(job)

                    # Add branch pattern mappings
                    for pattern in mapping_data.get("branch_patterns", []):
                        pattern_obj = BranchPattern(
                            project_id=project.id,
                            pattern=pattern["pattern"],
                            job_name=pattern["job_name"]
                        )
                        session.add(pattern_obj)

                    session.commit()
                    return True

                except Exception as e:
                    session.rollback()
                    logger.error("Failed to create project mapping", error=str(e))
                    return False
                finally:
                    session.close()

            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(None, _create_mapping)

        except Exception as e:
            logger.error("Failed to create project mapping", error=str(e))
            return False

# Global database service instance
_database_service: Optional[DatabaseService] = None

def get_database_service() -> DatabaseService:
    """Get database service instance"""
    global _database_service
    if _database_service is None:
        _database_service = DatabaseService()
    return _database_service
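Resolution order matters here: an exact branch mapping wins over a regex pattern, and the project's default job is only a fallback. Note also that stored patterns are applied with re.match, so a pattern like feature/* is interpreted as a regular expression (where * repeats the preceding slash), not as a shell glob; in practice it behaves like a prefix match. A hypothetical call, assuming the mapping created earlier exists in the database:

import asyncio

async def demo():
    db = get_database_service()
    # exact match -> "demo-dev-build"; "release/1.2" -> pattern match; anything else -> default job
    print(await db.determine_job_name("acme/demo", "develop"))
    print(await db.determine_job_name("acme/demo", "release/1.2"))

asyncio.run(demo())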
@ -1,213 +0,0 @@
"""
Deduplication service
Implements deduplication strategy based on commit hash + branch
"""

import asyncio
import hashlib
import json
from typing import Optional, Dict, Any
from datetime import datetime, timedelta
import structlog
from redis import asyncio as aioredis

from app.config import get_settings

logger = structlog.get_logger()

class DeduplicationService:
    """Deduplication service"""

    def __init__(self, redis_client: aioredis.Redis):
        self.redis = redis_client
        self.settings = get_settings()
        self.cache_prefix = "webhook:dedup:"

    async def is_duplicate(self, dedup_key: str) -> bool:
        """
        Check if the event is a duplicate
        Args:
            dedup_key: deduplication key (commit_hash:branch)
        Returns:
            bool: True if duplicate, False if new event
        """
        if not self.settings.deduplication.enabled:
            return False

        try:
            cache_key = f"{self.cache_prefix}{dedup_key}"

            # Check if in cache
            exists = await self.redis.exists(cache_key)
            if exists:
                logger.info("Duplicate event detected", dedup_key=dedup_key)
                return True

            # Record new event
            await self._record_event(cache_key, dedup_key)
            logger.info("New event recorded", dedup_key=dedup_key)
            return False

        except Exception as e:
            logger.error("Error checking duplication",
                         dedup_key=dedup_key, error=str(e))
            # Allow through on error to avoid blocking
            return False

    async def _record_event(self, cache_key: str, dedup_key: str):
        """Record event to cache"""
        try:
            # Set cache entry; the TTL is the deduplication window
            ttl = self.settings.deduplication.cache_ttl
            await self.redis.setex(cache_key, ttl, json.dumps({
                "dedup_key": dedup_key,
                "timestamp": datetime.utcnow().isoformat(),
                "ttl": ttl
            }))

            # Also record to window cache
            window_key = f"{self.cache_prefix}window:{dedup_key}"
            window_ttl = self.settings.deduplication.window_seconds
            await self.redis.setex(window_key, window_ttl, "1")

        except Exception as e:
            logger.error("Error recording event",
                         cache_key=cache_key, error=str(e))

    async def get_event_info(self, dedup_key: str) -> Optional[Dict[str, Any]]:
        """Get event info"""
        try:
            cache_key = f"{self.cache_prefix}{dedup_key}"
            data = await self.redis.get(cache_key)
            if data:
                return json.loads(data)
            return None
        except Exception as e:
            logger.error("Error getting event info",
                         dedup_key=dedup_key, error=str(e))
            return None

    async def clear_event(self, dedup_key: str) -> bool:
        """Clear event record"""
        try:
            cache_key = f"{self.cache_prefix}{dedup_key}"
            window_key = f"{self.cache_prefix}window:{dedup_key}"

            # Delete both cache keys
            await self.redis.delete(cache_key, window_key)
            logger.info("Event cleared", dedup_key=dedup_key)
            return True

        except Exception as e:
            logger.error("Error clearing event",
                         dedup_key=dedup_key, error=str(e))
            return False

    async def get_stats(self) -> Dict[str, Any]:
        """Get deduplication statistics"""
        try:
            # Get all deduplication keys
            pattern = f"{self.cache_prefix}*"
            keys = await self.redis.keys(pattern)

            # Count different types of keys
            total_keys = len(keys)
            window_keys = len([k for k in keys if b"window:" in k])
            event_keys = total_keys - window_keys

            # Get config info
            config = {
                "enabled": self.settings.deduplication.enabled,
                "window_seconds": self.settings.deduplication.window_seconds,
                "cache_ttl": self.settings.deduplication.cache_ttl,
                "strategy": self.settings.deduplication.strategy
            }

            return {
                "total_keys": total_keys,
                "window_keys": window_keys,
                "event_keys": event_keys,
                "config": config,
                "timestamp": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error("Error getting deduplication stats", error=str(e))
            return {"error": str(e)}

    async def cleanup_expired_events(self) -> int:
        """Clean up expired events"""
        try:
            pattern = f"{self.cache_prefix}*"
            keys = await self.redis.keys(pattern)

            cleaned_count = 0
            for key in keys:
                # Check TTL
                ttl = await self.redis.ttl(key)
                if ttl <= 0:
                    await self.redis.delete(key)
                    cleaned_count += 1

            if cleaned_count > 0:
                logger.info("Cleaned up expired events", count=cleaned_count)

            return cleaned_count

        except Exception as e:
            logger.error("Error cleaning up expired events", error=str(e))
            return 0

    def generate_dedup_key(self, commit_hash: str, branch: str) -> str:
        """
        Generate deduplication key
        Args:
            commit_hash: commit hash
            branch: branch name
        Returns:
            str: deduplication key
        """
        if self.settings.deduplication.strategy == "commit_branch":
            return f"{commit_hash}:{branch}"
        elif self.settings.deduplication.strategy == "commit_only":
            return commit_hash
        elif self.settings.deduplication.strategy == "branch_only":
            return branch
        else:
            # Default to commit_hash:branch
            return f"{commit_hash}:{branch}"

    async def is_in_window(self, dedup_key: str) -> bool:
        """
        Check if in deduplication time window
        Args:
            dedup_key: deduplication key
        Returns:
            bool: True if in window
        """
        try:
            window_key = f"{self.cache_prefix}window:{dedup_key}"
            exists = await self.redis.exists(window_key)
            return bool(exists)

        except Exception as e:
            logger.error("Error checking window",
                         dedup_key=dedup_key, error=str(e))
            return False

# Global deduplication service instance
_dedup_service: Optional[DeduplicationService] = None

def get_deduplication_service() -> DeduplicationService:
    """Get deduplication service instance"""
    global _dedup_service
    if _dedup_service is None:
        # The Redis client comes from dependency injection, so the
        # instance must be set explicitly before first use
        raise RuntimeError("DeduplicationService not initialized")
    return _dedup_service

def set_deduplication_service(service: DeduplicationService):
    """Set deduplication service instance"""
    global _dedup_service
    _dedup_service = service
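Because the module-level getter raises unless the instance was injected, startup code has to construct the Redis client and register the service explicitly. A minimal wiring sketch — the connection URL is a placeholder:

from redis import asyncio as aioredis

async def init_dedup():
    redis = aioredis.from_url("redis://localhost:6379/0")
    set_deduplication_service(DeduplicationService(redis))
    svc = get_deduplication_service()
    key = svc.generate_dedup_key("a" * 40, "main")
    print(await svc.is_duplicate(key))  # False on first sight, True within the TTL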
@ -1,100 +0,0 @@
"""
Jenkins service
Provides interaction with Jenkins
"""

import aiohttp
import structlog
from typing import Optional, Dict, Any

from app.config import get_settings

logger = structlog.get_logger()

class JenkinsService:
    """Jenkins service class"""

    def __init__(self):
        self.settings = get_settings()
        self.base_url = self.settings.jenkins.url
        self.username = self.settings.jenkins.username
        self.token = self.settings.jenkins.token
        self.timeout = self.settings.jenkins.timeout

    async def test_connection(self) -> bool:
        """Test Jenkins connection"""
        try:
            async with aiohttp.ClientSession() as session:
                auth = aiohttp.BasicAuth(self.username, self.token)
                async with session.get(
                    f"{self.base_url}/api/json",
                    auth=auth,
                    timeout=aiohttp.ClientTimeout(total=self.timeout)
                ) as response:
                    if response.status == 200:
                        logger.info("Jenkins connection test successful")
                        return True
                    else:
                        logger.warning(f"Jenkins connection test failed with status {response.status}")
                        return False
        except Exception as e:
            logger.error(f"Jenkins connection test failed: {str(e)}")
            return False

    async def trigger_job(self, job_name: str, parameters: Optional[Dict[str, Any]] = None) -> bool:
        """Trigger Jenkins job"""
        try:
            async with aiohttp.ClientSession() as session:
                auth = aiohttp.BasicAuth(self.username, self.token)

                # Build request URL
                url = f"{self.base_url}/job/{job_name}/build"

                # Use the parameterized build endpoint when parameters are supplied
                if parameters:
                    url = f"{self.base_url}/job/{job_name}/buildWithParameters"

                async with session.post(
                    url,
                    auth=auth,
                    params=parameters or {},
                    timeout=aiohttp.ClientTimeout(total=self.timeout)
                ) as response:
                    if response.status in [200, 201]:
                        logger.info(f"Successfully triggered Jenkins job: {job_name}")
                        return True
                    else:
                        logger.error(f"Failed to trigger Jenkins job {job_name}: {response.status}")
                        return False
        except Exception as e:
            logger.error(f"Error triggering Jenkins job {job_name}: {str(e)}")
            return False

    async def get_job_info(self, job_name: str) -> Optional[Dict[str, Any]]:
        """Get job info"""
        try:
            async with aiohttp.ClientSession() as session:
                auth = aiohttp.BasicAuth(self.username, self.token)
                async with session.get(
                    f"{self.base_url}/job/{job_name}/api/json",
                    auth=auth,
                    timeout=aiohttp.ClientTimeout(total=self.timeout)
                ) as response:
                    if response.status == 200:
                        return await response.json()
                    else:
                        logger.warning(f"Failed to get job info for {job_name}: {response.status}")
                        return None
        except Exception as e:
            logger.error(f"Error getting job info for {job_name}: {str(e)}")
            return None

# Global service instance
_jenkins_service = None

def get_jenkins_service() -> JenkinsService:
    """Get Jenkins service instance"""
    global _jenkins_service
    if _jenkins_service is None:
        _jenkins_service = JenkinsService()
    return _jenkins_service
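Triggering a build is then a one-liner from async code; Jenkins typically answers a queued build with 201 Created, which is why both 200 and 201 are accepted above. Job name and parameters below are illustrative:

import asyncio

async def demo():
    jenkins = get_jenkins_service()
    if await jenkins.test_connection():
        await jenkins.trigger_job("demo-build", {"BRANCH_NAME": "main"})

asyncio.run(demo())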
@ -1,70 +0,0 @@
"""
Queue service
Provides task queue management features
"""

import structlog
from typing import Dict, Any
from datetime import datetime

logger = structlog.get_logger()

class QueueService:
    """Queue service class"""

    def __init__(self):
        self.active_workers = 0
        self.queue_size = 0
        self.total_processed = 0
        self.total_failed = 0
        self._stats = {
            "active_workers": 0,
            "queue_size": 0,
            "total_processed": 0,
            "total_failed": 0
        }

    async def get_stats(self) -> Dict[str, Any]:
        """Get queue statistics"""
        return self._stats.copy()

    async def increment_processed(self):
        """Increase processed task count"""
        self.total_processed += 1
        self._stats["total_processed"] = self.total_processed

    async def increment_failed(self):
        """Increase failed task count"""
        self.total_failed += 1
        self._stats["total_failed"] = self.total_failed

    async def set_active_workers(self, count: int):
        """Set number of active workers"""
        self.active_workers = count
        self._stats["active_workers"] = count

    async def set_queue_size(self, size: int):
        """Set queue size"""
        self.queue_size = size
        self._stats["queue_size"] = size

    async def add_to_queue(self):
        """Add task to queue"""
        self.queue_size += 1
        self._stats["queue_size"] = self.queue_size

    async def remove_from_queue(self):
        """Remove task from queue"""
        if self.queue_size > 0:
            self.queue_size -= 1
        self._stats["queue_size"] = self.queue_size

# Global service instance
_queue_service = None

def get_queue_service() -> QueueService:
    """Get queue service instance"""
    global _queue_service
    if _queue_service is None:
        _queue_service = QueueService()
    return _queue_service
@ -1,277 +0,0 @@
"""
Webhook processing service
Implements intelligent dispatch, task queueing, and deduplication strategy
"""

import asyncio
from typing import Optional, Dict, Any
from datetime import datetime
import structlog
from celery import Celery

from app.config import get_settings
from app.models.gitea import GiteaWebhook, WebhookResponse
from app.services.dedup_service import DeduplicationService
from app.services.jenkins_service import JenkinsService
from app.services.database_service import get_database_service
from app.tasks.jenkins_tasks import trigger_jenkins_job

logger = structlog.get_logger()

class WebhookService:
    """Webhook processing service"""

    def __init__(
        self,
        dedup_service: DeduplicationService,
        jenkins_service: JenkinsService,
        celery_app: Celery
    ):
        self.dedup_service = dedup_service
        self.jenkins_service = jenkins_service
        self.celery_app = celery_app
        self.settings = get_settings()
        self.db_service = get_database_service()

    async def process_webhook(self, webhook: GiteaWebhook) -> WebhookResponse:
        """
        Process webhook event
        Args:
            webhook: Gitea webhook data
        Returns:
            WebhookResponse: processing result
        """
        try:
            # 1. Validate event type
            if not webhook.is_push_event():
                return WebhookResponse(
                    success=True,
                    message="Non-push event ignored",
                    event_id=webhook.get_event_id()
                )

            # 2. Extract key information
            branch = webhook.get_branch_name()
            commit_hash = webhook.get_commit_hash()
            repository = webhook.repository.full_name

            logger.info("Processing webhook",
                        repository=repository,
                        branch=branch,
                        commit_hash=commit_hash)

            # 3. Deduplication check
            dedup_key = self.dedup_service.generate_dedup_key(commit_hash, branch)
            if await self.dedup_service.is_duplicate(dedup_key):
                return WebhookResponse(
                    success=True,
                    message="Duplicate event ignored",
                    event_id=webhook.get_event_id()
                )

            # 4. Get project mapping and job name
            job_name = await self._determine_job_name(repository, branch)
            if not job_name:
                return WebhookResponse(
                    success=True,
                    message=f"No Jenkins job mapping for repository: {repository}, branch: {branch}",
                    event_id=webhook.get_event_id()
                )

            # 5. Prepare job parameters
            job_params = self._prepare_job_parameters(webhook, job_name)

            # 6. Submit job to queue
            task_result = await self._submit_job_to_queue(
                webhook, job_name, job_params
            )

            if task_result:
                return WebhookResponse(
                    success=True,
                    message="Job queued successfully",
                    event_id=webhook.get_event_id(),
                    job_name=job_name
                )
            else:
                return WebhookResponse(
                    success=False,
                    message="Failed to queue job",
                    event_id=webhook.get_event_id()
                )

        except Exception as e:
            logger.error("Error processing webhook",
                         repository=webhook.repository.full_name,
                         error=str(e))
            return WebhookResponse(
                success=False,
                message=f"Internal server error: {str(e)}",
                event_id=webhook.get_event_id()
            )

    async def _determine_job_name(self, repository: str, branch: str) -> Optional[str]:
        """Determine job name by repository and branch"""
        # First try to get project mapping from database
        job_name = await self.db_service.determine_job_name(repository, branch)
        if job_name:
            return job_name

        # If not found in database, use environment dispatch from config
        environment = self.settings.get_environment_for_branch(branch)
        if environment:
            return environment.jenkins_job

        return None

    def _prepare_job_parameters(self, webhook: GiteaWebhook, job_name: str) -> Dict[str, str]:
        """Prepare Jenkins job parameters"""
        author_info = webhook.get_author_info()

        return {
            "BRANCH_NAME": webhook.get_branch_name(),
            "COMMIT_SHA": webhook.get_commit_hash(),
            "REPOSITORY_URL": webhook.repository.clone_url,
            "REPOSITORY_NAME": webhook.repository.full_name,
            "PUSHER_NAME": author_info["name"],
            "PUSHER_EMAIL": author_info["email"],
            "PUSHER_USERNAME": author_info["username"],
            "COMMIT_MESSAGE": webhook.get_commit_message(),
            "JOB_NAME": job_name,
            "WEBHOOK_EVENT_ID": webhook.get_event_id(),
            "TRIGGER_TIME": datetime.utcnow().isoformat()
        }

    async def _submit_job_to_queue(
        self,
        webhook: GiteaWebhook,
        job_name: str,
        job_params: Dict[str, str]
    ) -> bool:
        """Submit job to Celery queue"""
        try:
            # Create task
            task_kwargs = {
                "job_name": job_name,
                "jenkins_url": self.settings.jenkins.url,
                "parameters": job_params,
                "event_id": webhook.get_event_id(),
                "repository": webhook.repository.full_name,
                "branch": webhook.get_branch_name(),
                "commit_hash": webhook.get_commit_hash(),
                "priority": 1  # Default priority
            }

            # Submit to Celery queue
            task = self.celery_app.send_task(
                "app.tasks.jenkins_tasks.trigger_jenkins_job",
                kwargs=task_kwargs,
                priority=task_kwargs["priority"]
            )

            logger.info("Job submitted to queue",
                        task_id=task.id,
                        job_name=job_name,
                        repository=webhook.repository.full_name,
                        branch=webhook.get_branch_name())

            return True

        except Exception as e:
            logger.error("Failed to submit job to queue",
                         job_name=job_name,
                         error=str(e))
            return False

    async def get_webhook_stats(self) -> Dict[str, Any]:
        """Get webhook processing statistics"""
        try:
            # Get queue stats
            queue_stats = await self._get_queue_stats()

            # Get deduplication stats
            dedup_stats = await self.dedup_service.get_stats()

            # Get environment config
            environments = {}
            for name, config in self.settings.environments.items():
                environments[name] = {
                    "branches": config.branches,
                    "jenkins_job": config.jenkins_job,
                    "jenkins_url": config.jenkins_url,
                    "priority": config.priority
                }

            return {
                "queue": queue_stats,
                "deduplication": dedup_stats,
                "environments": environments,
                "config": {
                    "max_concurrent": self.settings.queue.max_concurrent,
                    "max_retries": self.settings.queue.max_retries,
                    "retry_delay": self.settings.queue.retry_delay
                },
                "timestamp": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error("Error getting webhook stats", error=str(e))
            return {"error": str(e)}

    async def _get_queue_stats(self) -> Dict[str, Any]:
        """Get queue statistics"""
        try:
            # Get Celery queue stats
            inspect = self.celery_app.control.inspect()

            # Active tasks
            active = inspect.active()
            active_count = sum(len(tasks) for tasks in active.values()) if active else 0

            # Reserved tasks
            reserved = inspect.reserved()
            reserved_count = sum(len(tasks) for tasks in reserved.values()) if reserved else 0

            # Registered workers
            registered = inspect.registered()
            worker_count = len(registered) if registered else 0

            return {
                "active_tasks": active_count,
                "queued_tasks": reserved_count,
                "worker_count": worker_count,
                "queue_length": active_count + reserved_count
            }

        except Exception as e:
            logger.error("Error getting queue stats", error=str(e))
            return {"error": str(e)}

    async def clear_queue(self) -> Dict[str, Any]:
        """Clear queue"""
        try:
            # Revoke all active tasks
            inspect = self.celery_app.control.inspect()
            active = inspect.active()

            revoked_count = 0
            if active:
                for worker, tasks in active.items():
                    for task in tasks:
                        self.celery_app.control.revoke(task["id"], terminate=True)
                        revoked_count += 1

            logger.info("Queue cleared", revoked_count=revoked_count)

            return {
                "success": True,
                "revoked_count": revoked_count,
                "message": f"Cleared {revoked_count} tasks from queue"
            }

        except Exception as e:
            logger.error("Error clearing queue", error=str(e))
            return {
                "success": False,
                "error": str(e)
            }
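Worth noting: the service submits work by dotted task name through send_task rather than calling the task function, so the web process only needs a broker connection, not the worker's code path. A condensed sketch of the happy path, using the webhook object parsed earlier; the handler name is illustrative:

async def handle(webhook: GiteaWebhook, service: WebhookService):
    response = await service.process_webhook(webhook)
    # success=True with "Job queued successfully" means the Celery task was
    # accepted by the broker, not that Jenkins has already run the job
    print(response.message, response.job_name)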
@ -1 +0,0 @@
/* Bootstrap Icons 1.7.2 CSS placeholder. Replace this content with the official file and make sure the fonts directory contains the matching font files. */
File diff suppressed because one or more lines are too long
@ -1,83 +0,0 @@
.login-container {
    height: 100vh;
    display: flex;
    align-items: center;
    justify-content: center;
    background-color: #f8f9fa;
}

.login-form {
    width: 100%;
    max-width: 330px;
    padding: 15px;
    margin: auto;
}

.sidebar {
    position: fixed;
    top: 0;
    bottom: 0;
    left: 0;
    z-index: 100;
    padding: 48px 0 0;
    box-shadow: inset -1px 0 0 rgba(0, 0, 0, .1);
}

.sidebar-sticky {
    position: relative;
    top: 0;
    height: calc(100vh - 48px);
    padding-top: .5rem;
    overflow-x: hidden;
    overflow-y: auto;
}

.navbar-brand {
    padding-top: .75rem;
    padding-bottom: .75rem;
    font-size: 1rem;
    background-color: rgba(0, 0, 0, .25);
    box-shadow: inset -1px 0 0 rgba(0, 0, 0, .25);
}

.navbar .navbar-toggler {
    top: .25rem;
    right: 1rem;
}

.main-content {
    padding-top: 48px;
}

.card {
    margin-bottom: 1rem;
}

.health-indicator {
    width: 10px;
    height: 10px;
    border-radius: 50%;
    display: inline-block;
    margin-right: 5px;
}

.health-indicator.healthy {
    background-color: #28a745;
}

.health-indicator.unhealthy {
    background-color: #dc3545;
}

.log-entry {
    font-family: monospace;
    white-space: pre-wrap;
    font-size: 0.9rem;
}

.api-key {
    font-family: monospace;
    background-color: #f8f9fa;
    padding: 0.5rem;
    border-radius: 0.25rem;
}
File diff suppressed because one or more lines are too long
@ -1,267 +0,0 @@
// Global variable to store the JWT token
let authToken = localStorage.getItem("auth_token");

$(document).ready(function () {
  // Initialize tooltips
  $('[data-bs-toggle="tooltip"]').tooltip();

  // Set up AJAX defaults to include auth token
  $.ajaxSetup({
    beforeSend: function (xhr, settings) {
      // Don't add auth header for login request
      if (settings.url === "/api/auth/login") {
        return;
      }
      if (authToken) {
        xhr.setRequestHeader("Authorization", "Bearer " + authToken);
      }
    },
    error: function (xhr, status, error) {
      // If we get a 401, redirect to login
      if (xhr.status === 401) {
        localStorage.removeItem("auth_token");
        window.location.href = "/login";
        return;
      }
      handleAjaxError(xhr, status, error);
    },
  });

  // Handle login form submission
  $("#loginForm").on("submit", function (e) {
    e.preventDefault();
    const secretKey = $("#secret_key").val();
    $("#loginError").hide();

    $.ajax({
      url: "/api/auth/login",
      method: "POST",
      contentType: "application/json",
      data: JSON.stringify({ secret_key: secretKey }),
      success: function (response) {
        if (response && response.token) {
          // Store token and redirect
          localStorage.setItem("auth_token", response.token);
          authToken = response.token;
          window.location.href = "/dashboard";
        } else {
          $("#loginError").text("Invalid response from server").show();
        }
      },
      error: function (xhr) {
        console.error("Login error:", xhr);
        if (xhr.responseJSON && xhr.responseJSON.error) {
          $("#loginError").text(xhr.responseJSON.error).show();
        } else {
          $("#loginError").text("Login failed. Please try again.").show();
        }
        $("#secret_key").val("").focus();
      },
    });
  });

  // Only load dashboard data if we're on the dashboard page
  if (window.location.pathname === "/dashboard") {
    if (!authToken) {
      window.location.href = "/login";
      return;
    }

    // Load initial data
    loadProjects();
    loadAPIKeys();
    loadLogs();
    checkHealth();

    // Set up periodic health check
    setInterval(checkHealth, 30000);
  }

  // Project management
  $("#addProjectForm").on("submit", function (e) {
    e.preventDefault();
    const projectData = {
      name: $("#projectName").val(),
      jenkinsJob: $("#jenkinsJob").val(),
      giteaRepo: $("#giteaRepo").val(),
    };

    $.ajax({
      url: "/api/projects",
      method: "POST",
      contentType: "application/json",
      data: JSON.stringify(projectData),
      success: function () {
        $("#addProjectModal").modal("hide");
        loadProjects();
      },
      error: handleAjaxError,
    });
  });

  // API key management
  $("#generateKeyForm").on("submit", function (e) {
    e.preventDefault();
    $.ajax({
      url: "/api/keys",
      method: "POST",
      contentType: "application/json",
      data: JSON.stringify({ description: $("#keyDescription").val() }),
      success: function () {
        $("#generateKeyModal").modal("hide");
        loadAPIKeys();
      },
      error: handleAjaxError,
    });
  });

  // Log querying
  $("#logQueryForm").on("submit", function (e) {
    e.preventDefault();
    loadLogs({
      startTime: $("#startTime").val(),
      endTime: $("#endTime").val(),
      level: $("#logLevel").val(),
      query: $("#logQuery").val(),
    });
  });
});

function loadProjects() {
  $.get("/api/projects")
    .done(function (data) {
      const tbody = $("#projectsTable tbody");
      tbody.empty();

      data.projects.forEach(function (project) {
        tbody.append(`
          <tr>
            <td>${escapeHtml(project.name)}</td>
            <td>${escapeHtml(project.jenkinsJob)}</td>
            <td>${escapeHtml(project.giteaRepo)}</td>
          </tr>
        `);
      });
    })
    .fail(handleAjaxError);
}

function loadAPIKeys() {
  $.get("/api/keys")
    .done(function (data) {
      const tbody = $("#apiKeysTable tbody");
      tbody.empty();

      data.keys.forEach(function (key) {
        tbody.append(`
          <tr>
            <td>${escapeHtml(key.description)}</td>
            <td><code class="api-key">${escapeHtml(key.value)}</code></td>
            <td>${new Date(key.created).toLocaleString()}</td>
            <td>
              <button class="btn btn-sm btn-danger" onclick="revokeKey('${key.id}')">
                Revoke
              </button>
            </td>
          </tr>
        `);
      });
    })
    .fail(handleAjaxError);
}

function loadLogs(query = {}) {
  $.get("/api/logs", query)
    .done(function (data) {
      const logContainer = $("#logEntries");
      logContainer.empty();

      data.logs.forEach(function (log) {
        const levelClass =
          {
            error: "text-danger",
            warn: "text-warning",
            info: "text-info",
            debug: "text-secondary",
          }[log.level] || "";

        logContainer.append(`
          <div class="log-entry ${levelClass}">
            <small>${new Date(log.timestamp).toISOString()}</small>
            [${escapeHtml(log.level)}] ${escapeHtml(log.message)}
          </div>
        `);
      });
    })
    .fail(handleAjaxError);
}

function checkHealth() {
  $.get("/health")
    .done(function (data) {
      const indicator = $(".health-indicator");
      indicator
        .removeClass("healthy unhealthy")
        .addClass(data.status === "healthy" ? "healthy" : "unhealthy");
      $("#healthStatus").text(data.status);
    })
    .fail(function () {
      const indicator = $(".health-indicator");
      indicator.removeClass("healthy").addClass("unhealthy");
      $("#healthStatus").text("unhealthy");
    });
}

function deleteProject(id) {
  if (!confirm("Are you sure you want to delete this project?")) return;

  $.ajax({
    url: `/api/projects/${id}`,
    method: "DELETE",
    success: loadProjects,
    error: handleAjaxError,
  });
}

function revokeKey(id) {
  if (!confirm("Are you sure you want to revoke this API key?")) return;

  $.ajax({
    url: `/api/keys/${id}`,
    method: "DELETE",
    success: loadAPIKeys,
    error: handleAjaxError,
  });
}

function handleAjaxError(jqXHR, textStatus, errorThrown) {
  const message =
    jqXHR.responseJSON?.error || errorThrown || "An error occurred";
  alert(`Error: ${message}`);
}

function escapeHtml(unsafe) {
  return unsafe
    .replace(/&/g, "&amp;")
    .replace(/</g, "&lt;")
    .replace(/>/g, "&gt;")
    .replace(/"/g, "&quot;")
    .replace(/'/g, "&#039;");
}

function getCookie(name) {
  const cookies = document.cookie.split(";");
  for (let cookie of cookies) {
    const [cookieName, cookieValue] = cookie.split("=").map((c) => c.trim());
    if (cookieName === name) {
      console.debug(`Found cookie ${name}`);
      return cookieValue;
    }
  }
  console.debug(`Cookie ${name} not found`);
  return null;
}
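The dashboard exchanges a shared secret for a JWT at /api/auth/login and then sends it as a Bearer token on every request, as the ajaxSetup hook above shows. The same flow from a script, sketched with the requests library — host and secret are placeholders:

import requests

base = "http://localhost:8000"
token = requests.post(f"{base}/api/auth/login", json={"secret_key": "change-me"}).json()["token"]
projects = requests.get(f"{base}/api/projects", headers={"Authorization": f"Bearer {token}"}).json()
print(projects)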
File diff suppressed because one or more lines are too long
@ -1,301 +0,0 @@
|
||||
"""
|
||||
Jenkins task processing
|
||||
Asynchronous Jenkins job triggering using Celery
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import time
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
from celery import Celery, Task
|
||||
import httpx
|
||||
|
||||
from app.config import get_settings
|
||||
from app.services.jenkins_service import JenkinsService
|
||||
|
||||
logger = structlog.get_logger()
|
||||
settings = get_settings()
|
||||
|
||||
# Create Celery app
|
||||
celery_app = Celery(
|
||||
"gitea_webhook_ambassador",
|
||||
broker=settings.redis.url,
|
||||
backend=settings.redis.url,
|
||||
include=["app.tasks.jenkins_tasks"]
|
||||
)
|
||||
|
||||
# Celery configuration
|
||||
celery_app.conf.update(
|
||||
task_serializer="json",
|
||||
accept_content=["json"],
|
||||
result_serializer="json",
|
||||
timezone="UTC",
|
||||
enable_utc=True,
|
||||
task_track_started=True,
|
||||
task_time_limit=300, # 5 minutes timeout
|
||||
task_soft_time_limit=240, # 4 minutes soft timeout
|
||||
worker_prefetch_multiplier=1,
|
||||
worker_max_tasks_per_child=1000,
|
||||
worker_max_memory_per_child=200000, # 200MB
|
||||
task_acks_late=True,
|
||||
task_reject_on_worker_lost=True,
|
||||
task_always_eager=False, # Set to False in production
|
||||
result_expires=3600, # Result cache 1 hour
|
||||
)
|
||||
|
||||
|
||||
class JenkinsTask(Task):
|
||||
"""Jenkins task base class"""
|
||||
|
||||
abstract = True
|
||||
|
||||
def __init__(self):
|
||||
self.jenkins_service = None
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
if self.jenkins_service is None:
|
||||
self.jenkins_service = JenkinsService()
|
||||
return self.run(*args, **kwargs)
|
||||
|
||||
def on_failure(self, exc, task_id, args, kwargs, einfo):
|
||||
"""Task failure callback"""
|
||||
logger.error("Task failed",
|
||||
task_id=task_id,
|
||||
task_name=self.name,
|
||||
error=str(exc),
|
||||
args=args,
|
||||
kwargs=kwargs)
|
||||
|
||||
def on_retry(self, exc, task_id, args, kwargs, einfo):
|
||||
"""Task retry callback"""
|
||||
logger.warning("Task retrying",
|
||||
task_id=task_id,
|
||||
task_name=self.name,
|
||||
error=str(exc),
|
||||
retry_count=self.request.retries)
|
||||
|
||||
def on_success(self, retval, task_id, args, kwargs):
|
||||
"""Task success callback"""
|
||||
logger.info("Task completed successfully",
|
||||
task_id=task_id,
|
||||
task_name=self.name,
|
||||
result=retval)
|
||||
|
||||
|
||||
@celery_app.task(
|
||||
bind=True,
|
||||
base=JenkinsTask,
|
||||
max_retries=3,
|
||||
default_retry_delay=60,
|
||||
autoretry_for=(Exception,),
|
||||
retry_backoff=True,
|
||||
retry_jitter=True
|
||||
)
|
||||
def trigger_jenkins_job(
|
||||
self,
|
||||
job_name: str,
|
||||
jenkins_url: str,
|
||||
parameters: Dict[str, str],
|
||||
event_id: str,
|
||||
repository: str,
|
||||
branch: str,
|
||||
commit_hash: str,
|
||||
priority: int = 1
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Trigger Jenkins job
|
||||
Args:
|
||||
job_name: Jenkins job name
|
||||
jenkins_url: Jenkins URL
|
||||
parameters: job parameters
|
||||
event_id: event ID
|
||||
repository: repository name
|
||||
branch: branch name
|
||||
commit_hash: commit hash
|
||||
priority: priority
|
||||
Returns:
|
||||
Dict: job execution result
|
||||
"""
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
logger.info("Starting Jenkins job trigger",
|
||||
task_id=self.request.id,
|
||||
job_name=job_name,
|
||||
jenkins_url=jenkins_url,
|
||||
repository=repository,
|
||||
branch=branch,
|
||||
commit_hash=commit_hash,
|
||||
priority=priority)
|
||||
|
||||
# Create Jenkins service instance
|
||||
jenkins_service = JenkinsService()
|
||||
|
||||
# Trigger Jenkins job
|
||||
result = asyncio.run(jenkins_service.trigger_job(
|
||||
job_name=job_name,
|
||||
jenkins_url=jenkins_url,
|
||||
parameters=parameters
|
||||
))
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
if result["success"]:
|
||||
logger.info("Jenkins job triggered successfully",
|
||||
task_id=self.request.id,
|
||||
job_name=job_name,
|
||||
build_number=result.get("build_number"),
|
||||
execution_time=execution_time)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"task_id": self.request.id,
|
||||
"job_name": job_name,
|
||||
"jenkins_url": jenkins_url,
|
||||
"build_number": result.get("build_number"),
|
||||
"build_url": result.get("build_url"),
|
||||
"event_id": event_id,
|
||||
"repository": repository,
|
||||
"branch": branch,
|
||||
"commit_hash": commit_hash,
|
||||
"execution_time": execution_time,
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
}
|
||||
else:
|
||||
logger.error("Jenkins job trigger failed",
|
||||
task_id=self.request.id,
|
||||
job_name=job_name,
|
||||
error=result.get("error"),
|
||||
execution_time=execution_time)
|
||||
|
||||
# Retry task
|
||||
raise self.retry(
|
||||
countdown=settings.queue.retry_delay * (2 ** self.request.retries),
|
||||
max_retries=settings.queue.max_retries
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
execution_time = time.time() - start_time
|
||||
logger.error("Unexpected error in Jenkins task",
|
||||
task_id=self.request.id,
|
||||
job_name=job_name,
|
||||
error=str(e),
|
||||
execution_time=execution_time)
|
||||
|
||||
# Retry task
|
||||
raise self.retry(
|
||||
countdown=settings.queue.retry_delay * (2 ** self.request.retries),
|
||||
max_retries=settings.queue.max_retries
|
||||
)
|
||||
|
||||
|
||||
@celery_app.task(
|
||||
bind=True,
|
||||
base=JenkinsTask,
|
||||
max_retries=2,
|
||||
default_retry_delay=30
|
||||
)
|
||||
def check_jenkins_health(
|
||||
self,
|
||||
jenkins_url: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Check Jenkins health status
|
||||
Args:
|
||||
jenkins_url: Jenkins URL
|
||||
Returns:
|
||||
Dict: health check result
|
||||
"""
|
||||
try:
|
||||
logger.info("Checking Jenkins health", jenkins_url=jenkins_url)
|
||||
|
||||
jenkins_service = JenkinsService()
|
||||
result = asyncio.run(jenkins_service.check_health(jenkins_url))
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"jenkins_url": jenkins_url,
|
||||
"healthy": result.get("healthy", False),
|
||||
"response_time": result.get("response_time"),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Jenkins health check failed",
|
||||
jenkins_url=jenkins_url,
|
||||
error=str(e))
|
||||
|
||||
return {
|
||||
"success": False,
|
||||
"jenkins_url": jenkins_url,
|
||||
"error": str(e),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
|
||||
@celery_app.task(
|
||||
bind=True,
|
||||
base=JenkinsTask
|
||||
)
|
||||
def cleanup_expired_tasks(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Clean up expired tasks
|
||||
Returns:
|
||||
Dict: cleanup result
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting task cleanup")
|
||||
|
||||
# Get all tasks
|
||||
inspect = self.app.control.inspect()
|
||||
|
||||
# Clean up expired results
|
||||
cleaned_count = 0
|
||||
current_time = time.time()
|
||||
|
||||
# Add more complex cleanup logic here if needed
|
||||
# For example, clean up results older than a certain time
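        #
        # A minimal sketch of such age-based cleanup (hedged: `tracked_task_ids`
        # is a hypothetical helper that would have to record task IDs somewhere,
        # e.g. in Redis; it is not part of this module):
        #
        #   from celery.result import AsyncResult
        #   for task_id in tracked_task_ids(older_than=current_time - 3600):
        #       AsyncResult(task_id, app=self.app).forget()
        #       cleaned_count += 1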

        logger.info("Task cleanup completed", cleaned_count=cleaned_count)

        return {
            "success": True,
            "cleaned_count": cleaned_count,
            "timestamp": datetime.utcnow().isoformat()
        }

    except Exception as e:
        logger.error("Task cleanup failed", error=str(e))

        return {
            "success": False,
            "error": str(e),
            "timestamp": datetime.utcnow().isoformat()
        }


# Periodic tasks
@celery_app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    """Set up periodic tasks"""

    # Clean up expired tasks every hour
    sender.add_periodic_task(
        3600.0,  # 1 hour
        cleanup_expired_tasks.s(),
        name="cleanup-expired-tasks"
    )

    # Check Jenkins health every 5 minutes
    for env_name, env_config in settings.environments.items():
        sender.add_periodic_task(
            300.0,  # 5 minutes
            check_jenkins_health.s(env_config.jenkins_url),
            name=f"check-jenkins-health-{env_name}"
        )
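
    # Note: these schedules only fire when a Celery beat process runs alongside
    # the workers (see the `beat` service in docker-compose and the start
    # scripts later in this commit).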


def get_celery_app() -> Celery:
    """Get Celery app instance"""
    return celery_app
@ -1,210 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Dashboard - Gitea Webhook Ambassador</title>
    <link rel="stylesheet" href="/static/css/bootstrap.min.css">
    <link rel="stylesheet" href="/static/css/dashboard.css">
    <link rel="stylesheet" href="/static/css/bootstrap-icons.css">
</head>
<body>
    <header class="navbar navbar-dark sticky-top bg-dark flex-md-nowrap p-0 shadow">
        <a class="navbar-brand col-md-3 col-lg-2 me-0 px-3" href="#">Gitea Webhook Ambassador</a>
        <div class="navbar-nav">
            <div class="nav-item text-nowrap">
                <span class="px-3 text-white">
                    <span class="health-indicator"></span>
                    <span id="healthStatus">checking...</span>
                </span>
            </div>
        </div>
    </header>

    <div class="container-fluid">
        <div class="row">
            <nav id="sidebarMenu" class="col-md-3 col-lg-2 d-md-block bg-light sidebar collapse">
                <div class="position-sticky pt-3">
                    <ul class="nav flex-column">
                        <li class="nav-item">
                            <a class="nav-link active" href="#projects" data-bs-toggle="tab">
                                Projects
                            </a>
                        </li>
                        <li class="nav-item">
                            <a class="nav-link" href="#api-keys" data-bs-toggle="tab">
                                API Keys
                            </a>
                        </li>
                        <li class="nav-item">
                            <a class="nav-link" href="#logs" data-bs-toggle="tab">
                                Logs
                            </a>
                        </li>
                        <li class="nav-item">
                            <a class="nav-link" href="#health" data-bs-toggle="tab">
                                Health
                            </a>
                        </li>
                    </ul>
                </div>
            </nav>

            <main class="col-md-9 ms-sm-auto col-lg-10 px-md-4 main-content">
                <div class="tab-content" id="myTabContent">
                    <!-- Projects Tab -->
                    <div class="tab-pane fade show active" id="projects">
                        <div class="d-flex justify-content-between flex-wrap flex-md-nowrap align-items-center pt-3 pb-2 mb-3 border-bottom">
                            <h1 class="h2">Projects</h1>
                            <button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#addProjectModal">
                                Add Project
                            </button>
                        </div>
                        <div class="table-responsive">
                            <table class="table table-striped" id="projectsTable">
                                <thead>
                                    <tr>
                                        <th>Name</th>
                                        <th>Jenkins Job</th>
                                        <th>Gitea Repository</th>
                                        <th>Action</th>
                                    </tr>
                                </thead>
                                <tbody></tbody>
                            </table>
                        </div>
                    </div>

                    <!-- API Keys Tab -->
                    <div class="tab-pane fade" id="api-keys">
                        <div class="d-flex justify-content-between flex-wrap flex-md-nowrap align-items-center pt-3 pb-2 mb-3 border-bottom">
                            <h1 class="h2">API Keys</h1>
                            <button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#generateKeyModal">
                                Generate New Key
                            </button>
                        </div>
                        <div class="table-responsive">
                            <table class="table table-striped" id="apiKeysTable">
                                <thead>
                                    <tr>
                                        <th>Description</th>
                                        <th>Key</th>
                                        <th>Created</th>
                                        <th>Actions</th>
                                    </tr>
                                </thead>
                                <tbody></tbody>
                            </table>
                        </div>
                    </div>

                    <!-- Logs Tab -->
                    <div class="tab-pane fade" id="logs">
                        <div class="d-flex justify-content-between flex-wrap flex-md-nowrap align-items-center pt-3 pb-2 mb-3 border-bottom">
                            <h1 class="h2">Logs</h1>
                        </div>
                        <form id="logQueryForm" class="row g-3 mb-3">
                            <div class="col-md-3">
                                <label for="startTime" class="form-label">Start Time</label>
                                <input type="datetime-local" class="form-control" id="startTime">
                            </div>
                            <div class="col-md-3">
                                <label for="endTime" class="form-label">End Time</label>
                                <input type="datetime-local" class="form-control" id="endTime">
                            </div>
                            <div class="col-md-2">
                                <label for="logLevel" class="form-label">Log Level</label>
                                <select class="form-select" id="logLevel">
                                    <option value="">All</option>
                                    <option value="error">Error</option>
                                    <option value="warn">Warning</option>
                                    <option value="info">Info</option>
                                    <option value="debug">Debug</option>
                                </select>
                            </div>
                            <div class="col-md-3">
                                <label for="logQuery" class="form-label">Search Query</label>
                                <input type="text" class="form-control" id="logQuery" placeholder="Search logs...">
                            </div>
                            <div class="col-md-1">
                                <label class="form-label"> </label>
                                <button type="submit" class="btn btn-primary w-100">Search</button>
                            </div>
                        </form>
                        <div id="logEntries" class="border rounded p-3 bg-light"></div>
                    </div>

                    <!-- Health Tab -->
                    <div class="tab-pane fade" id="health">
                        <div class="d-flex justify-content-between flex-wrap flex-md-nowrap align-items-center pt-3 pb-2 mb-3 border-bottom">
                            <h1 class="h2">Health Status</h1>
                        </div>
                        <div id="healthDetails"></div>
                        <div id="statsDetails" class="mt-4"></div>
                    </div>
                </div>
            </main>
        </div>
    </div>

    <!-- Add Project Modal -->
    <div class="modal fade" id="addProjectModal" tabindex="-1">
        <div class="modal-dialog">
            <div class="modal-content">
                <div class="modal-header">
                    <h5 class="modal-title">Add New Project</h5>
                    <button type="button" class="btn-close" data-bs-dismiss="modal"></button>
                </div>
                <form id="addProjectForm">
                    <div class="modal-body">
                        <div class="mb-3">
                            <label for="projectName" class="form-label">Project Name</label>
                            <input type="text" class="form-control" id="projectName" required>
                        </div>
                        <div class="mb-3">
                            <label for="jenkinsJob" class="form-label">Jenkins Job</label>
                            <input type="text" class="form-control" id="jenkinsJob" required>
                        </div>
                        <div class="mb-3">
                            <label for="giteaRepo" class="form-label">Gitea Repository</label>
                            <input type="text" class="form-control" id="giteaRepo" required>
                        </div>
                    </div>
                    <div class="modal-footer">
                        <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                        <button type="submit" class="btn btn-primary">Add Project</button>
                    </div>
                </form>
            </div>
        </div>
    </div>

    <!-- Generate API Key Modal -->
    <div class="modal fade" id="generateKeyModal" tabindex="-1">
        <div class="modal-dialog">
            <div class="modal-content">
                <div class="modal-header">
                    <h5 class="modal-title">Generate New API Key</h5>
                    <button type="button" class="btn-close" data-bs-dismiss="modal"></button>
                </div>
                <form id="generateKeyForm">
                    <div class="modal-body">
                        <div class="mb-3">
                            <label for="keyDescription" class="form-label">Key Description</label>
                            <input type="text" class="form-control" id="keyDescription" required>
                        </div>
                    </div>
                    <div class="modal-footer">
                        <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                        <button type="submit" class="btn btn-primary">Generate Key</button>
                    </div>
                </form>
            </div>
        </div>
    </div>

    <script src="/static/js/jquery-3.7.1.min.js"></script>
    <script src="/static/js/bootstrap.bundle.min.js"></script>
    <script src="/static/js/dashboard.js"></script>
</body>
</html>
@ -1,173 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Login - Gitea Webhook Ambassador</title>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" rel="stylesheet">
    <style>
        .login-container {
            display: flex;
            align-items: center;
            justify-content: center;
            min-height: 100vh;
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
        }
        .login-form {
            width: 100%;
            max-width: 400px;
            padding: 2rem;
            margin: auto;
            background: white;
            border-radius: 12px;
            box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
        }
        .login-header {
            text-align: center;
            margin-bottom: 2rem;
        }
        .login-header h1 {
            color: #333;
            font-weight: 600;
            margin-bottom: 0.5rem;
        }
        .login-header p {
            color: #666;
            margin: 0;
        }
        .form-floating {
            margin-bottom: 1rem;
        }
        .btn-login {
            width: 100%;
            padding: 0.75rem;
            font-weight: 600;
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            border: none;
            border-radius: 8px;
        }
        .btn-login:hover {
            background: linear-gradient(135deg, #5a6fd8 0%, #6a4190 100%);
            transform: translateY(-1px);
        }
        .alert {
            border-radius: 8px;
            border: none;
        }
    </style>
</head>
<body>
    <div class="login-container">
        <div class="login-form">
            <div class="login-header">
                <h1>🔗 Gitea Webhook Ambassador</h1>
                <p>High-performance Gitea to Jenkins Webhook Service</p>
            </div>

            <form id="loginForm">
                <div class="alert alert-danger" role="alert" id="loginError" style="display: none;">
                </div>

                <div class="form-floating">
                    <input type="password" class="form-control" id="secret_key" name="secret_key" placeholder="Admin Secret Key" required>
                    <label for="secret_key">Admin Secret Key</label>
                </div>

                <button class="btn btn-primary btn-login" type="submit">
                    <span id="loginBtnText">Login</span>
                    <span id="loginBtnSpinner" class="spinner-border spinner-border-sm" style="display: none;"></span>
                </button>
            </form>

            <div class="text-center mt-3">
                <small class="text-muted">
                    Use the admin secret key for authentication
                </small>
            </div>
        </div>
    </div>

    <script src="https://cdn.jsdelivr.net/npm/jquery@3.7.1/dist/jquery.min.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js"></script>
    <script>
        $(document).ready(function() {
            // Check if already logged in
            const token = localStorage.getItem('auth_token');
            if (token) {
                window.location.href = '/dashboard';
                return;
            }

            // Handle login form submit
            $('#loginForm').on('submit', function(e) {
                e.preventDefault();

                const secretKey = $('#secret_key').val();
                if (!secretKey) {
                    showError('Please enter the admin secret key');
                    return;
                }

                // Show loading state
                $('#loginBtnText').hide();
                $('#loginBtnSpinner').show();
                $('#loginError').hide();

                // Send login request
                $.ajax({
                    url: '/api/auth/login',
                    method: 'POST',
                    contentType: 'application/json',
                    data: JSON.stringify({ secret_key: secretKey }),
                    success: function(response) {
                        if (response && response.token) {
                            // Save token and redirect
                            localStorage.setItem('auth_token', response.token);
                            window.location.href = '/dashboard';
                        } else {
                            showError('Invalid server response');
                        }
                    },
                    error: function(xhr) {
                        console.error('Login error:', xhr);
                        let errorMsg = 'Login failed, please try again';

                        if (xhr.responseJSON && xhr.responseJSON.detail) {
                            errorMsg = xhr.responseJSON.detail;
                        }

                        showError(errorMsg);
                        $('#secret_key').val('').focus();
                    },
                    complete: function() {
                        // Restore button state
                        $('#loginBtnText').show();
                        $('#loginBtnSpinner').hide();
                    }
                });
            });

            function showError(message) {
                $('#loginError').text(message).show();
            }

            // Enter key submit
            $('#secret_key').on('keypress', function(e) {
                if (e.which === 13) {
                    $('#loginForm').submit();
                }
            });

            // Check secret_key in URL params; auto-submit only after the submit
            // handler above is bound, otherwise the form would submit natively
            // and simply reload the page
            const urlParams = new URLSearchParams(window.location.search);
            const secretKeyFromUrl = urlParams.get('secret_key');
            if (secretKeyFromUrl) {
                $('#secret_key').val(secretKeyFromUrl);
                $('#loginForm').submit();
            }
        });
    </script>
</body>
</html>
@ -1,133 +0,0 @@
#!/bin/bash

# Gitea Webhook Ambassador - Version Check Script
# Used to distinguish between Go and Python versions

echo "🔍 Checking Gitea Webhook Ambassador version..."

# Check port 8000 (Python version default port)
echo "📡 Checking port 8000 (Python version)..."
if lsof -i :8000 > /dev/null 2>&1; then
    # head -n1 guards against lsof returning several PIDs
    PID=$(lsof -ti :8000 | head -n1)
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    echo "✅ Port 8000 is occupied (PID: $PID, process: $PROCESS)"

    # Check if it is a Python process
    if echo "$PROCESS" | grep -q "python\|uvicorn"; then
        echo "🐍 Detected Python version is running"

        # Try to access the Python version API
        if curl -s http://localhost:8000/api/health > /dev/null 2>&1; then
            echo "✅ Python version API is responsive"
            echo "🌐 Access: http://localhost:8000"
            echo "📊 Dashboard: http://localhost:8000/dashboard"
        else
            echo "⚠️ Python process exists but API is not responsive"
        fi
    else
        echo "⚠️ Port 8000 is occupied by another process"
    fi
else
    echo "❌ Port 8000 is not occupied (Python version not running)"
fi

echo ""

# Check port 8080 (Go version default port)
echo "📡 Checking port 8080 (Go version)..."
if lsof -i :8080 > /dev/null 2>&1; then
    PID=$(lsof -ti :8080 | head -n1)
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    echo "✅ Port 8080 is occupied (PID: $PID, process: $PROCESS)"

    # Check if it is a Go process
    if echo "$PROCESS" | grep -q "gitea-webhook-ambassador"; then
        echo "🚀 Detected Go version is running"

        # Try to access the Go version API
        if curl -s http://localhost:8080/health > /dev/null 2>&1; then
            echo "✅ Go version API is responsive"
            echo "🌐 Access: http://localhost:8080"
        else
            echo "⚠️ Go process exists but API is not responsive"
        fi
    else
        echo "⚠️ Port 8080 is occupied by another process"
    fi
else
    echo "❌ Port 8080 is not occupied (Go version not running)"
fi

echo ""

# Check PID file
echo "📁 Checking PID file..."

# Python version PID file
PYTHON_PID_FILE="/home/nicolas/freeleaps-ops/apps/gitea-webhook-ambassador-python/service.pid"
if [ -f "$PYTHON_PID_FILE" ]; then
    PYTHON_PID=$(cat "$PYTHON_PID_FILE")
    if ps -p "$PYTHON_PID" > /dev/null 2>&1; then
        echo "✅ Python version PID file exists (PID: $PYTHON_PID)"
    else
        echo "⚠️ Python version PID file exists but process does not exist"
    fi
else
    echo "❌ Python version PID file does not exist"
fi

# Go version PID file (if it exists)
GO_PID_FILE="/home/nicolas/freeleaps-ops/apps/gitea-webhook-ambassador/service.pid"
if [ -f "$GO_PID_FILE" ]; then
    GO_PID=$(cat "$GO_PID_FILE")
    if ps -p "$GO_PID" > /dev/null 2>&1; then
        echo "✅ Go version PID file exists (PID: $GO_PID)"
    else
        echo "⚠️ Go version PID file exists but process does not exist"
    fi
else
    echo "❌ Go version PID file does not exist"
fi

echo ""

# Summary
echo "📊 Summary:"
echo "----------------------------------------"

PYTHON_RUNNING=false
GO_RUNNING=false

# Check Python version
if lsof -i :8000 > /dev/null 2>&1; then
    PID=$(lsof -ti :8000 | head -n1)
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    if echo "$PROCESS" | grep -q "python\|uvicorn"; then
        PYTHON_RUNNING=true
    fi
fi

# Check Go version
if lsof -i :8080 > /dev/null 2>&1; then
    PID=$(lsof -ti :8080 | head -n1)
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    if echo "$PROCESS" | grep -q "gitea-webhook-ambassador"; then
        GO_RUNNING=true
    fi
fi

if [ "$PYTHON_RUNNING" = true ] && [ "$GO_RUNNING" = true ]; then
    echo "⚠️ Both versions are running!"
    echo "🐍 Python version: http://localhost:8000"
    echo "🚀 Go version: http://localhost:8080"
elif [ "$PYTHON_RUNNING" = true ]; then
    echo "✅ Currently running: Python version"
    echo "🌐 Access: http://localhost:8000"
elif [ "$GO_RUNNING" = true ]; then
    echo "✅ Currently running: Go version"
    echo "🌐 Access: http://localhost:8080"
else
    echo "❌ No version is running"
fi

echo "----------------------------------------"
@ -1,39 +0,0 @@
# Environment dispatch configuration
environments:
  dev:
    branches: ["dev", "develop", "development", "feature/*"]
    jenkins_job: "alpha-build"
    jenkins_url: "https://jenkins-alpha.freeleaps.com"
    priority: 2

  prod:
    branches: ["prod", "production", "main", "master", "release/*"]
    jenkins_job: "production-build"
    jenkins_url: "https://jenkins-prod.freeleaps.com"
    priority: 1

  staging:
    branches: ["staging", "stage", "pre-prod"]
    jenkins_job: "staging-build"
    jenkins_url: "https://jenkins-staging.freeleaps.com"
    priority: 3

  default:
    branches: ["*"]
    jenkins_job: "default-build"
    jenkins_url: "https://jenkins-default.freeleaps.com"
    priority: 4

# Deduplication configuration
deduplication:
  enabled: true
  window_seconds: 300       # 5-minute deduplication window
  strategy: "commit_branch" # commit_hash + branch
  cache_ttl: 3600           # Cache for 1 hour

# Queue configuration
queue:
  max_concurrent: 10
  max_retries: 3
  retry_delay: 60 # seconds
  priority_levels: 4
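
A minimal sketch of how this configuration might be consumed (hedged: the helper names below are illustrative, not the service's actual code; `fnmatch` gives the `feature/*`-style wildcard semantics, and the "commit_branch" strategy keys deduplication on commit hash plus branch):

    from fnmatch import fnmatch

    def resolve_environment(branch: str, environments: dict) -> str:
        """Return the name of the highest-priority matching environment."""
        matches = [
            (cfg["priority"], name)
            for name, cfg in environments.items()
            if any(fnmatch(branch, pattern) for pattern in cfg["branches"])
        ]
        return min(matches)[1]  # lower number = higher priority

    def dedup_key(commit_hash: str, branch: str) -> str:
        """Deduplication key under the 'commit_branch' strategy."""
        return f"{commit_hash}:{branch}"

    # resolve_environment("feature/login", envs) -> "dev": priority 2 beats the
    # catch-all default entry at priority 4.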
@ -1,32 +0,0 @@
server:
  port: 8000
  webhookPath: "/webhook"
  secretHeader: "X-Gitea-Signature"
  secretKey: "admin-secret-key-change-in-production"

jenkins:
  url: "http://jenkins.example.com"
  username: "jenkins-user"
  token: "jenkins-api-token"
  timeout: 30

admin:
  token: "admin-api-token" # Token for admin API access

database:
  path: "data/gitea-webhook-ambassador.db" # Path to SQLite database file

logging:
  level: "info"   # debug, info, warn, error
  format: "text"  # text, json
  file: "logs/service.log" # stdout if empty, or path to log file

worker:
  poolSize: 10    # Number of concurrent workers
  queueSize: 100  # Size of job queue
  maxRetries: 3   # Maximum number of retry attempts
  retryBackoff: 1 # Initial retry backoff in seconds (exponential)

eventCleanup:
  interval: 3600    # Cleanup interval in seconds
  expireAfter: 7200 # Event expiration time in seconds
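
As a worked example of the retry settings above (a sketch, assuming the exponential backoff doubles the initial `retryBackoff` on each attempt, up to `maxRetries`):

    def retry_delay(retry_backoff: int, attempt: int) -> int:
        """Seconds to wait before retry number `attempt` (0-based)."""
        return retry_backoff * (2 ** attempt)

    # With retryBackoff: 1 and maxRetries: 3 -> waits of 1s, 2s and 4s.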
@ -1,220 +0,0 @@
#!/bin/bash

# Gitea Webhook Ambassador (Python) - Devbox Script
# This script mimics the Go version's devbox functionality

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
APP_NAME="gitea-webhook-ambassador"
PID_FILE="$SCRIPT_DIR/service.pid"
LOG_FILE="$SCRIPT_DIR/logs/service.log"

# Create logs directory
mkdir -p "$SCRIPT_DIR/logs"

# Function to show usage
show_usage() {
    echo "Usage: $0 {start|stop|restart|status|logs|follow|init|install|help}"
    echo ""
    echo "Commands:"
    echo "  start   - Start the service in background"
    echo "  stop    - Stop the service"
    echo "  restart - Restart the service"
    echo "  status  - Show service status"
    echo "  logs    - Show latest logs"
    echo "  follow  - Follow logs in real-time"
    echo "  init    - Initialize database"
    echo "  install - Install dependencies"
    echo "  help    - Show this help message"
    echo ""
    echo "Examples:"
    echo "  $0 start    # Start service"
    echo "  $0 status   # Check status"
    echo "  $0 logs     # View logs"
}

# Function to check if virtual environment exists
check_venv() {
    if [ ! -d "$SCRIPT_DIR/venv" ]; then
        echo "❌ Virtual environment not found. Run '$0 install' first."
        exit 1
    fi
}

# Function to activate virtual environment
activate_venv() {
    source "$SCRIPT_DIR/venv/bin/activate"
}

# Function to start service
start_service() {
    echo "🚀 Starting $APP_NAME (Python Version)..."
    echo "🐍 Version: Python Enhanced with Web UI"

    check_venv

    if [ -f "$PID_FILE" ]; then
        PID=$(cat "$PID_FILE")
        if ps -p "$PID" > /dev/null 2>&1; then
            echo "❌ Service is already running (PID: $PID)"
            return 1
        else
            echo "⚠️ Found stale PID file, cleaning up..."
            rm -f "$PID_FILE"
        fi
    fi

    # Activate virtual environment and start service
    cd "$SCRIPT_DIR"
    activate_venv

    # Start the service in background
    nohup python -m uvicorn app.main_enhanced:app --host 0.0.0.0 --port 8000 > "$LOG_FILE" 2>&1 &
    PID=$!
    echo $PID > "$PID_FILE"

    # Wait a moment for service to start
    sleep 3

    if ps -p "$PID" > /dev/null 2>&1; then
        echo "✅ Python Service started successfully (PID: $PID)"
        echo "📝 Log file: $LOG_FILE"
        echo "🌐 Access: http://localhost:8000"
        echo "📊 Dashboard: http://localhost:8000/dashboard"
        echo "🔑 Admin key: admin-secret-key-change-in-production"
        echo "🐍 Python Version Features: Web UI, Database, JWT Auth"
    else
        echo "❌ Service failed to start"
        rm -f "$PID_FILE"
        return 1
    fi
}

# Function to stop service
stop_service() {
    echo "🛑 Stopping $APP_NAME..."

    if [ -f "$PID_FILE" ]; then
        PID=$(cat "$PID_FILE")
        if ps -p "$PID" > /dev/null 2>&1; then
            kill "$PID"
            echo "✅ Service stopped (PID: $PID)"
        else
            echo "⚠️ Service not running"
        fi
        rm -f "$PID_FILE"
    else
        echo "⚠️ No PID file found"
    fi
}

# Function to restart service
restart_service() {
    echo "🔄 Restarting $APP_NAME..."
    stop_service
    sleep 2
    start_service
}

# Function to show status
show_status() {
    if [ -f "$PID_FILE" ]; then
        PID=$(cat "$PID_FILE")
        if ps -p "$PID" > /dev/null 2>&1; then
            echo "✅ $APP_NAME (Python Version) is running (PID: $PID)"
            echo "🐍 Version: Python Enhanced with Web UI"
            echo "📝 Log file: $LOG_FILE"
            echo "🌐 Access: http://localhost:8000"
            echo "📊 Dashboard: http://localhost:8000/dashboard"
        else
            echo "❌ $APP_NAME is not running (PID file exists but process not found)"
            rm -f "$PID_FILE"
        fi
    else
        echo "❌ $APP_NAME is not running"
    fi
}

# Function to show logs
show_logs() {
    if [ -f "$LOG_FILE" ]; then
        echo "📝 Latest logs (last 50 lines):"
        echo "----------------------------------------"
        tail -n 50 "$LOG_FILE"
        echo "----------------------------------------"
        echo "Full log file: $LOG_FILE"
    else
        echo "❌ No log file found"
    fi
}

# Function to follow logs
follow_logs() {
    if [ -f "$LOG_FILE" ]; then
        echo "📝 Following logs (Ctrl+C to exit):"
        tail -f "$LOG_FILE"
    else
        echo "❌ No log file found"
    fi
}

# Function to initialize database
init_database() {
    echo "🗄️ Initializing database..."
    check_venv
    cd "$SCRIPT_DIR"
    activate_venv
    python -c "from app.models.database import create_tables; create_tables(); print('Database initialized successfully')"
}

# Function to install dependencies
install_dependencies() {
    echo "📦 Installing dependencies..."
    cd "$SCRIPT_DIR"

    if [ -d "venv" ]; then
        echo "⚠️ Virtual environment already exists. Removing..."
        rm -rf venv
    fi

    python3 -m venv venv
    activate_venv
    pip install -r requirements.txt
    echo "✅ Dependencies installed successfully"
}

# Main logic
case "$1" in
    start)
        start_service
        ;;
    stop)
        stop_service
        ;;
    restart)
        restart_service
        ;;
    status)
        show_status
        ;;
    logs)
        show_logs
        ;;
    follow)
        follow_logs
        ;;
    init)
        init_database
        ;;
    install)
        install_dependencies
        ;;
    help|--help|-h)
        show_usage
        ;;
    *)
        echo "❌ Unknown command: $1"
        echo ""
        show_usage
        exit 1
        ;;
esac
@ -1,176 +0,0 @@
version: '3.8'

services:
  # Redis service
  redis:
    image: redis:7-alpine
    container_name: webhook-ambassador-redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    command: redis-server --appendonly yes
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # PostgreSQL database (optional, for production)
  postgres:
    image: postgres:15-alpine
    container_name: webhook-ambassador-postgres
    environment:
      POSTGRES_DB: webhook_ambassador
      POSTGRES_USER: webhook_user
      POSTGRES_PASSWORD: webhook_password
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U webhook_user -d webhook_ambassador"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Webhook Ambassador API service
  api:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: webhook-ambassador-api
    ports:
      - "8000:8000"
    environment:
      - REDIS_URL=redis://redis:6379/0
      - DATABASE_URL=postgresql://webhook_user:webhook_password@postgres:5432/webhook_ambassador
      - JENKINS_USERNAME=${JENKINS_USERNAME}
      - JENKINS_TOKEN=${JENKINS_TOKEN}
      - SECURITY_SECRET_KEY=${SECURITY_SECRET_KEY}
      - LOGGING_LEVEL=INFO
    volumes:
      - ./config:/app/config
      - ./logs:/app/logs
    depends_on:
      redis:
        condition: service_healthy
      postgres:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
    restart: unless-stopped

  # Celery Worker
  worker:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: webhook-ambassador-worker
    command: celery -A app.tasks.jenkins_tasks worker --loglevel=info --concurrency=4
    environment:
      - REDIS_URL=redis://redis:6379/0
      - DATABASE_URL=postgresql://webhook_user:webhook_password@postgres:5432/webhook_ambassador
      - JENKINS_USERNAME=${JENKINS_USERNAME}
      - JENKINS_TOKEN=${JENKINS_TOKEN}
      - SECURITY_SECRET_KEY=${SECURITY_SECRET_KEY}
      - LOGGING_LEVEL=INFO
    volumes:
      - ./config:/app/config
      - ./logs:/app/logs
    depends_on:
      redis:
        condition: service_healthy
      postgres:
        condition: service_healthy
    restart: unless-stopped

  # Celery Beat (scheduler)
  beat:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: webhook-ambassador-beat
    command: celery -A app.tasks.jenkins_tasks beat --loglevel=info
    environment:
      - REDIS_URL=redis://redis:6379/0
      - DATABASE_URL=postgresql://webhook_user:webhook_password@postgres:5432/webhook_ambassador
      - JENKINS_USERNAME=${JENKINS_USERNAME}
      - JENKINS_TOKEN=${JENKINS_TOKEN}
      - SECURITY_SECRET_KEY=${SECURITY_SECRET_KEY}
      - LOGGING_LEVEL=INFO
    volumes:
      - ./config:/app/config
      - ./logs:/app/logs
    depends_on:
      redis:
        condition: service_healthy
      postgres:
        condition: service_healthy
    restart: unless-stopped

  # Flower (Celery monitoring)
  flower:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: webhook-ambassador-flower
    command: celery -A app.tasks.jenkins_tasks flower --port=5555
    ports:
      - "5555:5555"
    environment:
      - REDIS_URL=redis://redis:6379/0
      - DATABASE_URL=postgresql://webhook_user:webhook_password@postgres:5432/webhook_ambassador
      - JENKINS_USERNAME=${JENKINS_USERNAME}
      - JENKINS_TOKEN=${JENKINS_TOKEN}
      - SECURITY_SECRET_KEY=${SECURITY_SECRET_KEY}
      - LOGGING_LEVEL=INFO
    depends_on:
      redis:
        condition: service_healthy
      postgres:
        condition: service_healthy
    restart: unless-stopped

  # Prometheus (monitoring)
  prometheus:
    image: prom/prometheus:latest
    container_name: webhook-ambassador-prometheus
    ports:
      - "9090:9090"
    volumes:
      - ./monitoring/prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus_data:/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
      - '--storage.tsdb.path=/prometheus'
      - '--web.console.libraries=/etc/prometheus/console_libraries'
      - '--web.console.templates=/etc/prometheus/consoles'
      - '--storage.tsdb.retention.time=200h'
      - '--web.enable-lifecycle'
    restart: unless-stopped

  # Grafana (monitoring dashboard)
  grafana:
    image: grafana/grafana:latest
    container_name: webhook-ambassador-grafana
    ports:
      - "3000:3000"
    environment:
      - GF_SECURITY_ADMIN_PASSWORD=admin
    volumes:
      - grafana_data:/var/lib/grafana
      - ./monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards
      - ./monitoring/grafana/datasources:/etc/grafana/provisioning/datasources
    depends_on:
      - prometheus
    restart: unless-stopped

volumes:
  redis_data:
  postgres_data:
  prometheus_data:
  grafana_data:
@ -1,42 +0,0 @@
# Application configuration
APP_NAME=Gitea Webhook Ambassador
DEBUG=false
HOST=0.0.0.0
PORT=8000

# Database configuration
DATABASE_URL=sqlite:///./webhook_ambassador.db
# For production, use PostgreSQL:
# DATABASE_URL=postgresql://webhook_user:webhook_password@localhost:5432/webhook_ambassador

# Redis configuration
REDIS_URL=redis://localhost:6379/0
REDIS_PASSWORD=
REDIS_DB=0

# Jenkins configuration
JENKINS_USERNAME=your_jenkins_username
JENKINS_TOKEN=115127e693f1bc6b7194f58ff6d6283bd0
JENKINS_TIMEOUT=30

# Security configuration
SECURITY_SECRET_KEY=r6Y@QTb*7BQN@hDGsN
SECURITY_WEBHOOK_SECRET_HEADER=X-Gitea-Signature
SECURITY_RATE_LIMIT_PER_MINUTE=100

# Logging configuration
LOGGING_LEVEL=INFO
LOGGING_FORMAT=json
LOGGING_FILE=

# Queue configuration
QUEUE_MAX_CONCURRENT=10
QUEUE_MAX_RETRIES=3
QUEUE_RETRY_DELAY=60
QUEUE_PRIORITY_LEVELS=3

# Deduplication configuration
DEDUPLICATION_ENABLED=true
DEDUPLICATION_WINDOW_SECONDS=300
DEDUPLICATION_STRATEGY=commit_branch
DEDUPLICATION_CACHE_TTL=3600
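
A sketch of how these variables might map onto the service's settings objects, given that pydantic-settings is in the requirements and the tasks module reads names like `settings.queue.retry_delay` (the exact model layout here is an assumption, not the service's actual code):

    from pydantic_settings import BaseSettings, SettingsConfigDict

    class QueueSettings(BaseSettings):
        # Pull QUEUE_* variables from the environment or a .env file
        model_config = SettingsConfigDict(env_prefix="QUEUE_")

        max_concurrent: int = 10
        max_retries: int = 3
        retry_delay: int = 60
        priority_levels: int = 3

    queue = QueueSettings()  # e.g. QUEUE_RETRY_DELAY=60 -> queue.retry_delay == 60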
@ -1,41 +0,0 @@
#!/bin/bash

# Fix PID file issue
echo "🔧 Fixing PID file issue..."

# Find the Python service process (head -n1 in case lsof returns several PIDs)
PID=$(lsof -ti :8000 2>/dev/null | head -n1)

if [ -n "$PID" ]; then
    echo "✅ Found running Python service (PID: $PID)"

    # Check if the process is our service
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    if echo "$PROCESS" | grep -q "python"; then
        echo "🐍 Confirmed: Python version of Gitea Webhook Ambassador"

        # Create PID file
        echo $PID > service.pid
        echo "✅ PID file created: service.pid"

        # Verify PID file
        if [ -f "service.pid" ]; then
            STORED_PID=$(cat service.pid)
            echo "📝 PID file content: $STORED_PID"

            if [ "$STORED_PID" = "$PID" ]; then
                echo "✅ PID file fixed successfully"
                echo "💡 Now you can use './devbox stop' to stop the service"
            else
                echo "❌ PID file content does not match"
            fi
        else
            echo "❌ Failed to create PID file"
        fi
    else
        echo "⚠️ Port 8000 is occupied by another process"
    fi
else
    echo "❌ No running Python service found"
    echo "💡 Please start the service first: './devbox start'"
fi
@ -1,15 +0,0 @@
[Unit]
Description=Gitea Webhook Ambassador Python Service
After=network.target

[Service]
Type=simple
User=nicolas
WorkingDirectory=/home/nicolas/freeleaps-ops/apps/gitea-webhook-ambassador-python
Environment=PATH=/home/nicolas/freeleaps-ops/apps/gitea-webhook-ambassador-python/venv/bin
ExecStart=/home/nicolas/freeleaps-ops/apps/gitea-webhook-ambassador-python/venv/bin/python -m uvicorn app.main_demo:app --host 0.0.0.0 --port 8000
Restart=always
RestartSec=10

[Install]
WantedBy=multi-user.target
File diff suppressed because one or more lines are too long
@ -1,31 +0,0 @@
#!/bin/bash

# Quick check of the currently running version

echo "🔍 Quick check of Gitea Webhook Ambassador version..."

# Check Python version (port 8000)
if lsof -i :8000 > /dev/null 2>&1; then
    PID=$(lsof -ti :8000 | head -n1)
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    if echo "$PROCESS" | grep -q "python\|uvicorn"; then
        echo "🐍 Python version is running (PID: $PID)"
        echo "🌐 http://localhost:8000"
        echo "📊 http://localhost:8000/dashboard"
        exit 0
    fi
fi

# Check Go version (port 8080)
if lsof -i :8080 > /dev/null 2>&1; then
    PID=$(lsof -ti :8080 | head -n1)
    PROCESS=$(ps -p "$PID" -o comm= 2>/dev/null)
    if echo "$PROCESS" | grep -q "gitea-webhook-ambassador"; then
        echo "🚀 Go version is running (PID: $PID)"
        echo "🌐 http://localhost:8080"
        exit 0
    fi
fi

echo "❌ No version is running"
echo "💡 Use './devbox start' to start the Python version"
@ -1,15 +0,0 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
pydantic==2.5.0
pydantic-settings==2.1.0
structlog==23.2.0
httpx==0.25.2
celery==5.3.4
redis==5.0.1
sqlalchemy==2.0.23
jinja2==3.1.2
python-multipart==0.0.6
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
psutil==5.9.6
aiofiles==23.2.1
@ -1,55 +0,0 @@
#!/bin/bash

# Gitea Webhook Ambassador quick setup script

echo "🚀 Starting Gitea Webhook Ambassador setup..."

# Check Python version (compare with sort -V; a plain string comparison
# would mis-handle versions like 3.10)
python_version=$(python3 -V 2>&1 | awk '{print $2}')
if [ "$(printf '%s\n' "3.8" "$python_version" | sort -V | head -n1)" != "3.8" ]; then
    echo "❌ Python 3.8 or higher is required, current version: $python_version"
    exit 1
fi

echo "✅ Python version check passed: $python_version"

# Create virtual environment
if [ ! -d "venv" ]; then
    echo "📦 Creating virtual environment..."
    python3 -m venv venv
fi

# Activate virtual environment
echo "🔧 Activating virtual environment..."
source venv/bin/activate

# Upgrade pip
echo "⬆️ Upgrading pip..."
pip install --upgrade pip

# Install dependencies
echo "📚 Installing dependencies..."
pip install -r requirements.txt

# Create config file
if [ ! -f ".env" ]; then
    echo "⚙️ Creating environment config file..."
    cp env.example .env
    echo "📝 Please edit the .env file to configure your Jenkins credentials and other settings"
fi

# Create logs directory
mkdir -p logs

# Create database directory
mkdir -p data

echo "✅ Setup complete!"
echo "📋 Next steps:"
echo "1. Edit the .env file to configure Jenkins credentials"
echo "2. Run: source venv/bin/activate"
echo "3. Start Redis: docker run -d -p 6379:6379 redis:alpine"
echo "4. Start the service: python -m uvicorn app.main:app --reload"
echo "5. Start Celery worker: celery -A app.tasks.jenkins_tasks worker --loglevel=info"
echo "🌐 Access: http://localhost:8000"
echo "📊 Monitoring dashboard: http://localhost:8000/health"
@ -1,65 +0,0 @@
#!/bin/bash

# Gitea Webhook Ambassador start script

# Check virtual environment
if [ ! -d "venv" ]; then
    echo "❌ Virtual environment does not exist, please run ./scripts/setup.sh first"
    exit 1
fi

# Activate virtual environment
source venv/bin/activate

# Check environment file
if [ ! -f ".env" ]; then
    echo "❌ .env file does not exist, please run ./scripts/setup.sh first"
    exit 1
fi

# Check if Redis is running
if ! pgrep -f redis-server > /dev/null; then
    echo "🐳 Starting Redis..."
    docker run -d -p 6379:6379 redis:alpine
fi

# Start Gitea Webhook Ambassador
echo "🚀 Starting Gitea Webhook Ambassador..."

# Start API service
API_LOG="logs/api.log"
echo "🌐 Starting API service..."
nohup python -m uvicorn app.main:app --host 0.0.0.0 --port 8000 > "$API_LOG" 2>&1 &
API_PID=$!

# Wait for API service to start
sleep 3

# Start Celery worker
WORKER_LOG="logs/worker.log"
echo "⚙️ Starting Celery worker..."
nohup celery -A app.tasks.jenkins_tasks worker --loglevel=info > "$WORKER_LOG" 2>&1 &
WORKER_PID=$!

# Start Celery beat (scheduled tasks)
BEAT_LOG="logs/beat.log"
echo "⏰ Starting scheduled task scheduler..."
nohup celery -A app.tasks.jenkins_tasks beat --loglevel=info > "$BEAT_LOG" 2>&1 &
BEAT_PID=$!

# All services started
sleep 2
echo "✅ All services started!"
echo "📊 Service status:"
echo "- API service: http://localhost:8000 (PID: $API_PID)"
echo "- Health check: http://localhost:8000/health"
echo "- Metrics: http://localhost:8000/metrics"

# Wait for Ctrl+C to stop all services
echo "🛑 Press Ctrl+C to stop all services"

# Wait for the interrupt signal
trap 'echo "🛑 Stopping services..."; kill $API_PID $WORKER_PID $BEAT_PID 2>/dev/null; exit 0' INT

# Wait for all background processes
wait
@ -1,154 +0,0 @@
#!/bin/bash

# Gitea Webhook Ambassador Python Start Script

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SERVICE_NAME="gitea-webhook-ambassador-python"
LOG_FILE="$SCRIPT_DIR/logs/service.log"
PID_FILE="$SCRIPT_DIR/service.pid"

# Create logs directory
mkdir -p "$SCRIPT_DIR/logs"

# Activate virtual environment
source "$SCRIPT_DIR/venv/bin/activate"

# Function: Start service
start_service() {
    echo "🚀 Starting $SERVICE_NAME..."

    if [ -f "$PID_FILE" ]; then
        PID=$(cat "$PID_FILE")
        if ps -p "$PID" > /dev/null 2>&1; then
            echo "❌ Service is already running (PID: $PID)"
            return 1
        else
            echo "⚠️ Found stale PID file, cleaning up..."
            rm -f "$PID_FILE"
        fi
    fi

    # Start service in background
    nohup python -m uvicorn app.main_demo:app --host 0.0.0.0 --port 8000 > "$LOG_FILE" 2>&1 &
    PID=$!
    echo $PID > "$PID_FILE"

    # Wait for service to start
    sleep 3

    if ps -p "$PID" > /dev/null 2>&1; then
        echo "✅ Service started successfully (PID: $PID)"
        echo "📝 Log file: $LOG_FILE"
        echo "🌐 Access: http://localhost:8000"
        echo "🔑 Demo keys: demo_admin_key, demo_user_key"
    else
        echo "❌ Service failed to start"
        rm -f "$PID_FILE"
        return 1
    fi
}

# Function: Stop service
stop_service() {
    echo "🛑 Stopping $SERVICE_NAME..."

    if [ -f "$PID_FILE" ]; then
        PID=$(cat "$PID_FILE")
        if ps -p "$PID" > /dev/null 2>&1; then
            kill "$PID"
            echo "✅ Service stopped (PID: $PID)"
        else
            echo "⚠️ Service not running"
        fi
        rm -f "$PID_FILE"
    else
        echo "⚠️ PID file does not exist"
    fi
}

# Function: Restart service
restart_service() {
    echo "🔄 Restarting $SERVICE_NAME..."
    stop_service
    sleep 2
    start_service
}

# Function: Show status
status_service() {
    if [ -f "$PID_FILE" ]; then
        PID=$(cat "$PID_FILE")
        if ps -p "$PID" > /dev/null 2>&1; then
            echo "✅ $SERVICE_NAME is running (PID: $PID)"
            echo "📝 Log file: $LOG_FILE"
            echo "🌐 Access: http://localhost:8000"
        else
            echo "❌ $SERVICE_NAME is not running (PID file exists but process not found)"
            rm -f "$PID_FILE"
        fi
    else
        echo "❌ $SERVICE_NAME is not running"
    fi
}

# Function: Show logs
show_logs() {
    if [ -f "$LOG_FILE" ]; then
        echo "📝 Showing latest logs (last 50 lines):"
        echo "----------------------------------------"
        tail -n 50 "$LOG_FILE"
        echo "----------------------------------------"
        echo "Full log file: $LOG_FILE"
    else
        echo "❌ Log file does not exist"
    fi
}

# Function: Follow logs
follow_logs() {
    if [ -f "$LOG_FILE" ]; then
        echo "📝 Real-time logs (Ctrl+C to exit):"
        tail -f "$LOG_FILE"
    else
        echo "❌ Log file does not exist"
    fi
}

# Main logic
case "$1" in
    start)
        start_service
        ;;
    stop)
        stop_service
        ;;
    restart)
        restart_service
        ;;
    status)
        status_service
        ;;
    logs)
        show_logs
        ;;
    follow)
        follow_logs
        ;;
    *)
        echo "Usage: $0 {start|stop|restart|status|logs|follow}"
        echo ""
        echo "Command description:"
        echo "  start   - Start service"
        echo "  stop    - Stop service"
        echo "  restart - Restart service"
        echo "  status  - Show service status"
        echo "  logs    - Show latest logs"
        echo "  follow  - Show real-time logs"
        echo ""
        echo "Examples:"
        echo "  $0 start   # Start service"
        echo "  $0 status  # Show status"
        echo "  $0 logs    # Show logs"
        exit 1
        ;;
esac
@ -1,170 +0,0 @@
#!/usr/bin/env python3
"""
Authentication feature test script
Demonstrates how to properly use JWT and API key authentication
"""

import asyncio
import aiohttp
import json
from datetime import datetime

BASE_URL = "http://localhost:8000"


def print_divider():
    print("-" * 50)


async def test_jwt_authentication():
    """Test JWT authentication"""
    print("🔐 Testing JWT authentication")
    print_divider()

    # Note: In actual applications, JWT tokens should be obtained via the
    # login endpoint. Here we use a sample token instead.

    # Simulated JWT token (should be obtained from the login endpoint in real use)
    jwt_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhZG1pbiIsImV4cCI6MTczMjAwMDAwMH0.test"

    async with aiohttp.ClientSession() as session:
        # Use JWT token to access an admin endpoint
        headers = {"Authorization": f"Bearer {jwt_token}"}

        # Test access to the logs endpoint
        async with session.get(f"{BASE_URL}/api/logs", headers=headers) as response:
            if response.status == 200:
                logs = await response.json()
                print("✅ JWT authentication succeeded - logs access")
                print(f"   Retrieved {len(logs)} logs")
            else:
                print(f"❌ JWT authentication failed - logs access: {response.status}")
                if response.status == 401:
                    print("   Reason: JWT token is invalid or expired")
    print()


async def test_api_key_authentication():
    """Test API key authentication"""
    print("🔑 Testing API key authentication")
    print_divider()

    async with aiohttp.ClientSession() as session:
        # First, create an API key (requires admin privileges).
        # Note: here we use a temporary authentication method.

        # Method 1: use an in-memory API key (for demo only).
        # In real applications, API keys should be created via the admin interface.

        # Simulate a valid API key
        api_key = "test_api_key_12345"

        headers = {"Authorization": f"Bearer {api_key}"}

        # Test access to the logs endpoint
        async with session.get(f"{BASE_URL}/api/logs", headers=headers) as response:
            if response.status == 200:
                logs = await response.json()
                print("✅ API key authentication succeeded - logs access")
                print(f"   Retrieved {len(logs)} logs")
            else:
                print(f"❌ API key authentication failed - logs access: {response.status}")
                if response.status == 401:
                    print("   Reason: API key is invalid or revoked")
    print()


async def test_public_endpoints():
    """Test public endpoints (no authentication required)"""
    print("🌐 Testing public endpoints")
    print_divider()

    async with aiohttp.ClientSession() as session:
        # Health check endpoint (no authentication required)
        async with session.get(f"{BASE_URL}/health") as response:
            if response.status == 200:
                data = await response.json()
                print("✅ Health check endpoint accessed successfully")
                print(f"   Status: {data['status']}")
            else:
                print(f"❌ Health check endpoint access failed: {response.status}")

        # Webhook endpoint (no authentication required)
        webhook_data = {"test": "webhook_data"}
        async with session.post(f"{BASE_URL}/webhook/gitea", json=webhook_data) as response:
            if response.status == 200:
                data = await response.json()
                print("✅ Webhook endpoint accessed successfully")
                print(f"   Response: {data['message']}")
            else:
                print(f"❌ Webhook endpoint access failed: {response.status}")
    print()


async def test_authentication_flow():
    """Test the complete authentication flow"""
    print("🔄 Testing complete authentication flow")
    print_divider()

    print("📋 Authentication flow description:")
    print("1. Public endpoints: /health, /webhook/gitea - no authentication required")
    print("2. Admin endpoints: /api/admin/* - JWT or API key required")
    print("3. Logs endpoints: /api/logs/* - JWT or API key required")
    print()

    print("🔧 How to obtain authentication tokens:")
    print("1. JWT token: Obtain via the login endpoint (login feature required)")
    print("2. API key: Create via the admin interface (admin privileges required)")
    print()

    print("⚠️ Demo limitations:")
    print("- Using simulated authentication tokens")
    print("- In real applications, implement full login and key management")
    print("- It is recommended to use real authentication systems in production")
    print()
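

# A hedged sketch of obtaining a real JWT instead of the simulated token above.
# It assumes the /api/auth/login endpoint used by the login page, which accepts
# {"secret_key": ...} and returns {"token": ...}; adjust if the API differs.
async def login_for_token(secret_key: str) -> str:
    """Exchange the admin secret key for a JWT via the login endpoint."""
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{BASE_URL}/api/auth/login",
            json={"secret_key": secret_key}
        ) as response:
            data = await response.json()
            return data["token"]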


async def create_demo_api_key():
    """Create a demo API key"""
    print("🔧 Creating demo API key")
    print_divider()

    # Note: This is a simplified demo.
    # In real applications, API keys should be created and stored securely.

    demo_api_key = "demo_api_key_" + str(int(datetime.now().timestamp()))

    print(f"✅ Demo API key created: {demo_api_key}")
    print("📝 Usage:")
    print(f"   curl -H 'Authorization: Bearer {demo_api_key}' {BASE_URL}/api/logs")
    print()

    return demo_api_key


async def main():
    """Main test function"""
    print("🚀 Starting authentication feature tests")
    print("=" * 60)
    print()

    try:
        # Wait for service to start
        await asyncio.sleep(2)

        await test_public_endpoints()
        await test_jwt_authentication()
        await test_api_key_authentication()
        await test_authentication_flow()

        # Create demo API key
        demo_key = await create_demo_api_key()

        print("=" * 60)
        print("🎉 Authentication feature tests completed!")
        print()
        print("📚 Next steps:")
        print("1. Implement a full login system")
        print("2. Add user management features")
        print("3. Implement secure API key storage")
        print("4. Add permission control mechanisms")
        print("5. Implement session management")

    except Exception as e:
        print(f"❌ Error occurred during testing: {str(e)}")


if __name__ == "__main__":
    asyncio.run(main())
|
||||
@ -1,222 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Enhanced Gitea Webhook Ambassador feature test script
|
||||
Demonstrates all new monitoring and management features
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
BASE_URL = "http://localhost:8000"
|
||||
|
||||
def print_divider():
|
||||
print("-" * 50)
|
||||
|
||||
async def test_health_check():
|
||||
"""Test enhanced health check"""
|
||||
print("🧪 Testing enhanced health check")
|
||||
print_divider()
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(f"{BASE_URL}/health") as response:
|
||||
if response.status == 200:
|
||||
data = await response.json()
|
||||
print("✅ Health check passed")
|
||||
print(f" Status: {data['status']}")
|
||||
print(f" Service: {data['service']}")
|
||||
print(f" Jenkins: {data['jenkins']['status']}")
|
||||
print(f" Worker pool: {data['worker_pool']['active_workers']} active workers")
|
||||
print(f" Queue size: {data['worker_pool']['queue_size']}")
|
||||
print(f" Processed: {data['worker_pool']['total_processed']}")
|
||||
print(f" Failed: {data['worker_pool']['total_failed']}")
|
||||
else:
|
||||
print(f"❌ Health check failed: {response.status}")
|
||||
print()
|
||||
|
||||
async def test_webhook():
|
||||
"""Test webhook feature"""
|
||||
print("🧪 Testing webhook feature")
|
||||
print_divider()
|
||||
|
||||
webhook_data = {
|
||||
"ref": "refs/heads/dev",
|
||||
"before": "abc123",
|
||||
"after": "def456",
|
||||
"repository": {
|
||||
"full_name": "freeleaps/test-project",
|
||||
"clone_url": "https://gitea.freeleaps.com/freeleaps/test-project.git"
|
||||
},
|
||||
"pusher": {
|
||||
"login": "developer",
|
||||
"email": "dev@freeleaps.com"
|
||||
}
|
||||
}
|
||||
|
||||
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{BASE_URL}/webhook/gitea",
            json=webhook_data
        ) as response:
            if response.status == 200:
                data = await response.json()
                print("✅ Webhook processed successfully")
                print(f" Response: {data['message']}")
                print(f" Data size: {data['data']['body_size']} bytes")
            else:
                print(f"❌ Webhook processing failed: {response.status}")
    print()


async def test_api_key_management():
    """Test API key management"""
    print("🧪 Testing API key management")
    print_divider()

    # Create API key
    async with aiohttp.ClientSession() as session:
        # Create key
        create_data = {"name": "test-api-key"}
        async with session.post(
            f"{BASE_URL}/api/admin/api-keys",
            json=create_data,
            headers={"Authorization": "Bearer test-token"}
        ) as response:
            if response.status == 200:
                data = await response.json()
                api_key = data['key']
                key_id = data['id']
                print("✅ API key created successfully")
                print(f" ID: {key_id}")
                print(f" Name: {data['name']}")
                print(f" Key: {api_key[:8]}...{api_key[-8:]}")

                # Test logs endpoint with new key
                print("\n Testing logs endpoint with new key...")
                async with session.get(
                    f"{BASE_URL}/api/logs",
                    headers={"Authorization": f"Bearer {api_key}"}
                ) as log_response:
                    if log_response.status == 200:
                        logs = await log_response.json()
                        print(f" ✅ Logs access succeeded, retrieved {len(logs)} logs")
                    else:
                        print(f" ❌ Logs access failed: {log_response.status}")

                # Delete key
                async with session.delete(
                    f"{BASE_URL}/api/admin/api-keys/{key_id}",
                    headers={"Authorization": f"Bearer {api_key}"}
                ) as delete_response:
                    if delete_response.status == 200:
                        print(" ✅ API key deleted successfully")
                    else:
                        print(f" ❌ API key deletion failed: {delete_response.status}")
            else:
                print(f"❌ API key creation failed: {response.status}")
    print()


async def test_project_mapping():
    """Test project mapping management"""
    print("🧪 Testing project mapping management")
    print_divider()

    mapping_data = {
        "repository_name": "freeleaps/test-project",
        "default_job": "test-project-build",
        "branch_jobs": [
            {"branch": "dev", "job": "test-project-dev"},
            {"branch": "staging", "job": "test-project-staging"}
        ],
        "branch_patterns": [
            {"pattern": "feature/*", "job": "test-project-feature"},
            {"pattern": "hotfix/*", "job": "test-project-hotfix"}
        ]
    }

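    # Branch patterns look like shell globs; assuming fnmatch-style matching
    # on the server side (an assumption, not verified by this script):
    #   from fnmatch import fnmatch
    #   fnmatch("feature/login-page", "feature/*")  # -> True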
    async with aiohttp.ClientSession() as session:
        # Create project mapping
        async with session.post(
            f"{BASE_URL}/api/admin/projects",
            json=mapping_data,
            headers={"Authorization": "Bearer test-token"}
        ) as response:
            if response.status == 200:
                data = await response.json()
                print("✅ Project mapping created successfully")
                print(f" ID: {data['id']}")
                print(f" Repository: {data['repository_name']}")
                print(f" Default job: {data['default_job']}")
                print(f" Branch jobs: {len(data['branch_jobs'])}")
                print(f" Branch patterns: {len(data['branch_patterns'])}")
            else:
                print(f"❌ Project mapping creation failed: {response.status}")
    print()


async def test_logs_and_stats():
    """Test logs and statistics features"""
    print("🧪 Testing logs and statistics features")
    print_divider()

    async with aiohttp.ClientSession() as session:
        # Get log statistics
        async with session.get(
            f"{BASE_URL}/api/logs/stats",
            headers={"Authorization": "Bearer test-token"}
        ) as response:
            if response.status == 200:
                stats = await response.json()
                print("✅ Log statistics retrieved successfully")
                print(f" Total logs: {stats['total_logs']}")
                print(f" Successful logs: {stats['successful_logs']}")
                print(f" Failed logs: {stats['failed_logs']}")
                print(f" Recent logs (24h): {stats['recent_logs_24h']}")
                print(f" Repository stats: {len(stats['repository_stats'])} repositories")
            else:
                print(f"❌ Log statistics retrieval failed: {response.status}")
    print()


async def test_admin_stats():
    """Test admin statistics"""
    print("🧪 Testing admin statistics")
    print_divider()

    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"{BASE_URL}/api/admin/stats",
            headers={"Authorization": "Bearer test-token"}
        ) as response:
            if response.status == 200:
                stats = await response.json()
                print("✅ Admin statistics retrieved successfully")
                print(f" Total API keys: {stats['api_keys']['total']}")
                print(f" Active keys: {stats['api_keys']['active']}")
                print(f" Recently used: {stats['api_keys']['recently_used']}")
                print(f" Total project mappings: {stats['project_mappings']['total']}")
            else:
                print(f"❌ Admin statistics retrieval failed: {response.status}")
    print()


async def main():
    """Main test function"""
    print("🚀 Starting enhanced Gitea Webhook Ambassador feature tests")
    print("=" * 60)
    print()

    try:
        await test_health_check()
        await test_webhook()
        await test_api_key_management()
        await test_project_mapping()
        await test_logs_and_stats()
        await test_admin_stats()

        print("=" * 60)
        print("🎉 All tests completed!")
        print("✅ Python version now has the same monitoring and management features as the Go version")

    except Exception as e:
        print(f"❌ Error occurred during testing: {str(e)}")

if __name__ == "__main__":
    asyncio.run(main())
@ -1,225 +0,0 @@
#!/usr/bin/env python3
"""
Gitea Webhook Ambassador Enhanced Feature Test Script
"""

import requests
import json
import time

BASE_URL = "http://localhost:8000"
ADMIN_SECRET_KEY = "admin-secret-key-change-in-production"
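# NOTE: assumed to match the admin secret configured on the service side;
# the value above is a local-testing placeholder, as its name suggests.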

def test_health_check():
    """Test health check"""
    print("🔍 Testing health check...")
    try:
        response = requests.get(f"{BASE_URL}/health")
        if response.status_code == 200:
            data = response.json()
            print(f"✅ Health check succeeded: {data['status']}")
            print(f" Version: {data['version']}")
            print(f" Uptime: {data['uptime']}")
            print(f" Memory usage: {data['memory']}")
            return True
        else:
            print(f"❌ Health check failed: {response.status_code}")
            return False
    except Exception as e:
        print(f"❌ Health check exception: {e}")
        return False


def test_login():
    """Test login functionality"""
    print("\n🔐 Testing login functionality...")
    try:
        # Test login API
        login_data = {"secret_key": ADMIN_SECRET_KEY}
        response = requests.post(
            f"{BASE_URL}/api/auth/login",
            json=login_data,
            headers={"Content-Type": "application/json"}
        )

        if response.status_code == 200:
            data = response.json()
            token = data.get("token")
            print("✅ Login succeeded, obtained JWT token")
            return token
        else:
            print(f"❌ Login failed: {response.status_code} - {response.text}")
            return None
    except Exception as e:
        print(f"❌ Login exception: {e}")
        return None


def test_api_key_management(token):
    """Test API key management"""
    print("\n🔑 Testing API key management...")
    headers = {"Authorization": f"Bearer {token}"}

    try:
        # Create API key
        key_data = {"description": "Test API key"}
        response = requests.post(
            f"{BASE_URL}/api/keys",
            json=key_data,
            headers=headers
        )

        if response.status_code == 200:
            data = response.json()
            api_key = data["key"]
            key_id = data["id"]
            print(f"✅ API key created successfully: {api_key[:20]}...")

            # List API keys
            response = requests.get(f"{BASE_URL}/api/keys", headers=headers)
            if response.status_code == 200:
                keys_data = response.json()
                print(f"✅ API keys listed successfully, total {len(keys_data['keys'])}")

                # Delete API key
                response = requests.delete(f"{BASE_URL}/api/keys/{key_id}", headers=headers)
                if response.status_code == 200:
                    print("✅ API key deleted successfully")
                    return True
                else:
                    print(f"❌ API key deletion failed: {response.status_code}")
                    return False
            else:
                print(f"❌ API key listing failed: {response.status_code}")
                return False
        else:
            print(f"❌ API key creation failed: {response.status_code} - {response.text}")
            return False
    except Exception as e:
        print(f"❌ API key management exception: {e}")
        return False


def test_project_management(token):
    """Test project management"""
    print("\n📁 Testing project management...")
    headers = {"Authorization": f"Bearer {token}"}

    try:
        # Create project
        project_data = {
            "name": "Test Project",
            "jenkinsJob": "test-job",
            "giteaRepo": "test-owner/test-repo"
        }
        response = requests.post(
            f"{BASE_URL}/api/projects/",
            json=project_data,
            headers=headers
        )

        if response.status_code == 200:
            data = response.json()
            project_id = data["id"]
            print(f"✅ Project created successfully: {data['name']}")

            # List projects
            response = requests.get(f"{BASE_URL}/api/projects/", headers=headers)
            if response.status_code == 200:
                projects_data = response.json()
                print(f"✅ Projects listed successfully, total {len(projects_data['projects'])}")

                # Delete project
                response = requests.delete(f"{BASE_URL}/api/projects/{project_id}", headers=headers)
                if response.status_code == 200:
                    print("✅ Project deleted successfully")
                    return True
                else:
                    print(f"❌ Project deletion failed: {response.status_code}")
                    return False
            else:
                print(f"❌ Project listing failed: {response.status_code}")
                return False
        else:
            print(f"❌ Project creation failed: {response.status_code} - {response.text}")
            return False
    except Exception as e:
        print(f"❌ Project management exception: {e}")
        return False


def test_stats(token):
    """Test statistics information"""
    print("\n📊 Testing statistics information...")
    headers = {"Authorization": f"Bearer {token}"}

    try:
        response = requests.get(f"{BASE_URL}/api/stats", headers=headers)
        if response.status_code == 200:
            data = response.json()
            print("✅ Statistics retrieved successfully:")
            print(f" Total projects: {data['total_projects']}")
            print(f" API keys: {data['total_api_keys']}")
            print(f" Triggers today: {data['today_triggers']}")
            print(f" Successful triggers: {data['successful_triggers']}")
            return True
        else:
            print(f"❌ Statistics retrieval failed: {response.status_code}")
            return False
    except Exception as e:
        print(f"❌ Statistics exception: {e}")
        return False


def test_logs(token):
    """Test log functionality"""
    print("\n📝 Testing log functionality...")
    headers = {"Authorization": f"Bearer {token}"}

    try:
        response = requests.get(f"{BASE_URL}/api/logs", headers=headers)
        if response.status_code == 200:
            data = response.json()
            print(f"✅ Logs retrieved successfully, total {len(data['logs'])} records")
            return True
        else:
            print(f"❌ Log retrieval failed: {response.status_code}")
            return False
    except Exception as e:
        print(f"❌ Log functionality exception: {e}")
        return False


def main():
    """Main test function"""
    print("🚀 Gitea Webhook Ambassador Enhanced Feature Test")
    print("=" * 50)

    # Test health check
    if not test_health_check():
        print("❌ Health check failed, service may not be running")
        return

    # Test login
    token = test_login()
    if not token:
        print("❌ Login failed, cannot continue testing")
        return

    # Test features
    test_api_key_management(token)
    test_project_management(token)
    test_stats(token)
    test_logs(token)

    print("\n" + "=" * 50)
    print("🎉 Enhanced feature test completed!")
    print("\n📋 Implemented features:")
    print(" ✅ Web login interface")
    print(" ✅ Database storage for API keys")
    print(" ✅ Extended JWT validity (7 days)")
    print(" ✅ Frontend dashboard")
    print(" ✅ Project management")
    print(" ✅ API key management")
    print(" ✅ Log viewing")
    print(" ✅ Health status monitoring")
    print("\n🌐 Access URLs:")
    print(f" Login page: {BASE_URL}/login")
    print(f" Dashboard: {BASE_URL}/dashboard")


if __name__ == "__main__":
    main()
@ -1,307 +0,0 @@
#!/usr/bin/env python3
"""
Webhook feature test script

Used to verify all features of Gitea Webhook Ambassador
"""

import asyncio
import json
import httpx
import time
from datetime import datetime

# Test configuration
BASE_URL = "http://localhost:8000"
WEBHOOK_SECRET = "your-secret-key-here-make-it-long-and-random"

# Test data
TEST_WEBHOOK_DATA = {
    "ref": "refs/heads/dev",
    "before": "abc1234567890abcdef1234567890abcdef123456",
    "after": "def1234567890abcdef1234567890abcdef123456",
    "compare_url": "https://gitea.freeleaps.com/freeleaps/test-project/compare/abc123...def123",
    "commits": [
        {
            "id": "def1234567890abcdef1234567890abcdef123456",
            "message": "feat: add new feature",
            "url": "https://gitea.freeleaps.com/freeleaps/test-project/commit/def1234567890abcdef1234567890abcdef123456",
            "author": {
                "id": 1,
                "login": "developer",
                "full_name": "Test Developer",
                "email": "dev@freeleaps.com"
            }
        }
    ],
    "repository": {
        "id": 1,
        "name": "test-project",
        "owner": {
            "id": 1,
            "login": "freeleaps",
            "full_name": "Freeleaps Team",
            "email": "team@freeleaps.com"
        },
        "full_name": "freeleaps/test-project",
        "private": False,
        "clone_url": "https://gitea.freeleaps.com/freeleaps/test-project.git",
        "ssh_url": "git@gitea.freeleaps.com:freeleaps/test-project.git",
        "html_url": "https://gitea.freeleaps.com/freeleaps/test-project",
        "default_branch": "main"
    },
    "pusher": {
        "id": 1,
        "login": "developer",
        "full_name": "Test Developer",
        "email": "dev@freeleaps.com"
    }
}
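# The payload above mirrors the shape of a Gitea push event; only the fields
# the ambassador presumably reads (ref, after, repository.full_name, pusher)
# need to be realistic for these tests, and the hashes are synthetic.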


async def test_health_check():
    """Test health check"""
    print("🔍 Testing health check...")

    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(f"{BASE_URL}/health")
            if response.status_code == 200:
                data = response.json()
                print(f"✅ Health check passed: {data['status']}")
                return True
            else:
                print(f"❌ Health check failed: {response.status_code}")
                return False
        except Exception as e:
            print(f"❌ Health check exception: {e}")
            return False


async def test_queue_status():
    """Test queue status"""
    print("🔍 Testing queue status...")

    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(f"{BASE_URL}/health/queue")
            if response.status_code == 200:
                data = response.json()
                print(f"✅ Queue status: {data['queue_stats']}")
                return True
            else:
                print(f"❌ Queue status check failed: {response.status_code}")
                return False
        except Exception as e:
            print(f"❌ Queue status exception: {e}")
            return False


async def test_webhook_endpoint():
    """Test webhook endpoint"""
    print("🔍 Testing webhook endpoint...")

    headers = {
        "Content-Type": "application/json",
        "X-Gitea-Signature": WEBHOOK_SECRET
    }
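    # NOTE: this test sends the raw secret in X-Gitea-Signature. Real Gitea
    # sends an HMAC-SHA256 hex digest of the request body instead; a service
    # verifying true signatures would expect roughly (a sketch):
    #   import hashlib, hmac
    #   sig = hmac.new(WEBHOOK_SECRET.encode(), body, hashlib.sha256).hexdigest()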

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers=headers,
                json=TEST_WEBHOOK_DATA
            )

            print(f"📊 Response status: {response.status_code}")
            print(f"📊 Response content: {response.text}")

            if response.status_code in [200, 202]:
                print("✅ Webhook endpoint test passed")
                return True
            else:
                print(f"❌ Webhook endpoint test failed: {response.status_code}")
                return False

        except Exception as e:
            print(f"❌ Webhook endpoint exception: {e}")
            return False


async def test_metrics_endpoint():
    """Test metrics endpoint"""
    print("🔍 Testing metrics endpoint...")

    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(f"{BASE_URL}/metrics")
            if response.status_code == 200:
                print("✅ Metrics endpoint test passed")
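                # Prometheus exposition format is one plain-text sample per
                # line, e.g. `webhook_requests_total{status="success"} 42`;
                # the label set shown here is illustrative, not verified.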
                # Print some key metrics
                content = response.text
                for line in content.split('\n'):
                    if 'webhook_requests_total' in line or 'queue_size' in line:
                        print(f"📊 {line}")
                return True
            else:
                print(f"❌ Metrics endpoint test failed: {response.status_code}")
                return False
        except Exception as e:
            print(f"❌ Metrics endpoint exception: {e}")
            return False


async def test_deduplication():
    """Test deduplication feature"""
    print("🔍 Testing deduplication feature...")

    headers = {
        "Content-Type": "application/json",
        "X-Gitea-Signature": WEBHOOK_SECRET
    }

    async with httpx.AsyncClient() as client:
        try:
            # First request
            print("📤 Sending first request...")
            response1 = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers=headers,
                json=TEST_WEBHOOK_DATA
            )
            print(f"📊 First response: {response1.status_code}")

            # Wait one second
            await asyncio.sleep(1)

            # Second request (same data, should be deduplicated)
            print("📤 Sending second request (same data)...")
            response2 = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers=headers,
                json=TEST_WEBHOOK_DATA
            )
            print(f"📊 Second response: {response2.status_code}")

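            # Deduplication is presumably keyed on repository + "after" commit
            # hash, so the identical payload above should be dropped while the
            # fresh hash below should be processed again (an assumption this
            # test probes rather than asserts).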
            # Modify commit hash, send third request
            modified_data = TEST_WEBHOOK_DATA.copy()
            modified_data["after"] = "ghi1234567890abcdef1234567890abcdef123456"

            print("📤 Sending third request (different commit hash)...")
            response3 = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers=headers,
                json=modified_data
            )
            print(f"📊 Third response: {response3.status_code}")

            print("✅ Deduplication feature test completed")
            return True

        except Exception as e:
            print(f"❌ Deduplication feature exception: {e}")
            return False


async def test_invalid_webhook():
    """Test invalid webhook requests"""
    print("🔍 Testing invalid webhook requests...")

    async with httpx.AsyncClient() as client:
        try:
            # Test missing signature
            print("📤 Testing missing signature...")
            response1 = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers={"Content-Type": "application/json"},
                json=TEST_WEBHOOK_DATA
            )
            print(f"📊 Missing signature response: {response1.status_code}")
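            # If signature verification is enforced, the missing- and
            # wrong-signature requests would be expected to return 401/403;
            # this script only logs the status codes rather than asserting them.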

            # Test wrong signature
            print("📤 Testing wrong signature...")
            response2 = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers={
                    "Content-Type": "application/json",
                    "X-Gitea-Signature": "wrong-secret"
                },
                json=TEST_WEBHOOK_DATA
            )
            print(f"📊 Wrong signature response: {response2.status_code}")

            # Test invalid JSON
            print("📤 Testing invalid JSON...")
            response3 = await client.post(
                f"{BASE_URL}/webhook/gitea",
                headers={
                    "Content-Type": "application/json",
                    "X-Gitea-Signature": WEBHOOK_SECRET
                },
                content="invalid json"
            )
            print(f"📊 Invalid JSON response: {response3.status_code}")

            print("✅ Invalid request tests completed")
            return True

        except Exception as e:
            print(f"❌ Invalid request tests exception: {e}")
            return False


async def main():
    """Main test function"""
    print("🚀 Starting Gitea Webhook Ambassador feature tests")
    print("=" * 50)

    tests = [
        ("Health Check", test_health_check),
        ("Queue Status", test_queue_status),
        ("Webhook Endpoint", test_webhook_endpoint),
        ("Metrics", test_metrics_endpoint),
        ("Deduplication", test_deduplication),
        ("Invalid Requests", test_invalid_webhook),
    ]

    results = []

    for test_name, test_func in tests:
        print(f"\n🧪 {test_name}")
        print("-" * 30)

        try:
            result = await test_func()
            results.append((test_name, result))
        except Exception as e:
            print(f"❌ {test_name} test exception: {e}")
            results.append((test_name, False))

        # Wait a bit before next test
        await asyncio.sleep(1)

    # Output test results
    print("\n" + "=" * 50)
    print("📊 Test Results Summary")
    print("=" * 50)

    passed = 0
    total = len(results)

    for test_name, result in results:
        status = "✅ Passed" if result else "❌ Failed"
        print(f"{test_name}: {status}")
        if result:
            passed += 1

    print(f"\n📈 Overall: {passed}/{total} tests passed")

    if passed == total:
        print("🎉 All tests passed! Service is running normally.")
    else:
        print("⚠️ Some tests failed, please check service configuration and logs.")


if __name__ == "__main__":
    # Run tests
    asyncio.run(main())