first commit

This commit is contained in:
2026-01-16 22:20:18 +03:00
commit 5d437e5e28
56 changed files with 4463 additions and 0 deletions

View File

@@ -0,0 +1,124 @@
import logging
from datetime import timedelta
from django.utils import timezone
from django.db.models import Avg, Min, Max
from decimal import Decimal
from monitor.models import BitcoinPrice, MarketAnalysis
logger = logging.getLogger(__name__)
class MarketAnalyzer:
    """Service for analyzing Bitcoin market data.

    Compares the most recent BitcoinPrice row against the average over a
    lookback window and records a MarketAnalysis row, flagging dip/peak
    events when the price leaves a percentage band around the average.
    """

    def __init__(self, threshold_percent=15.0):
        # Stored as Decimal so band math stays exact against DecimalField prices.
        self.threshold_percent = Decimal(str(threshold_percent))

    def analyze_market(self, period='hourly'):
        """Analyze the Bitcoin market for a given period."""
        try:
            since = self._get_time_filter(period)
            prices = BitcoinPrice.objects.filter(timestamp__gte=since)
            if not prices.exists():
                logger.warning(f"No price data available for {period} analysis")
                return None

            newest = prices.latest('timestamp')

            # Window statistics; fall back to the newest price when an
            # aggregate comes back None (or zero, via `or`).
            aggregates = prices.aggregate(
                avg=Avg('price_usd'),
                min=Min('price_usd'),
                max=Max('price_usd'),
            )
            avg_price = aggregates['avg'] or newest.price_usd
            min_price = aggregates['min'] or newest.price_usd
            max_price = aggregates['max'] or newest.price_usd

            # Event band: threshold_percent either side of the average.
            fraction = self.threshold_percent / 100
            lower_threshold = avg_price * (1 - fraction)
            upper_threshold = avg_price * (1 + fraction)

            current_price = newest.price_usd
            if current_price < lower_threshold:
                status, is_event, event_type = 'dip', True, 'dip_below'
            elif current_price > upper_threshold:
                status, is_event, event_type = 'peak', True, 'rise_above'
            else:
                status, is_event, event_type = 'neutral', False, ''

            # Persist the analysis snapshot.
            analysis = MarketAnalysis.objects.create(
                period=period,
                current_price=current_price,
                average_price=avg_price,
                min_price=min_price,
                max_price=max_price,
                status=status,
                threshold_percent=self.threshold_percent,
                lower_threshold=lower_threshold,
                upper_threshold=upper_threshold,
                is_event=is_event,
                event_type=event_type,
            )
            logger.info(f"Market analysis saved: {status} at ${current_price}")
            return analysis
        except BitcoinPrice.DoesNotExist:
            logger.error("No Bitcoin price data found")
            return None
        except Exception as e:
            # Best-effort service: log and return None rather than propagate.
            logger.error(f"Error analyzing market: {e}")
            return None

    def _get_time_filter(self, period):
        """Get datetime filter based on period."""
        windows = {
            'hourly': timedelta(hours=1),
            'daily': timedelta(days=1),
            'weekly': timedelta(weeks=1),
            'yearly': timedelta(days=365),
        }
        # Unknown period names fall back to a daily window.
        return timezone.now() - windows.get(period, timedelta(days=1))

    def get_latest_analysis(self, period='hourly'):
        """Get the latest analysis for a period."""
        try:
            return MarketAnalysis.objects.filter(period=period).latest('timestamp')
        except MarketAnalysis.DoesNotExist:
            return None

    def get_analysis_summary(self):
        """Get summary of all analyses."""
        latest = ((p, self.get_latest_analysis(p))
                  for p in ('hourly', 'daily', 'weekly', 'yearly'))
        # Periods with no stored analysis are simply omitted.
        return {
            period: {
                'status': analysis.status,
                'current_price': float(analysis.current_price),
                'average_price': float(analysis.average_price),
                'threshold_percent': float(analysis.threshold_percent),
                'is_event': analysis.is_event,
            }
            for period, analysis in latest if analysis
        }

View File

@@ -0,0 +1,376 @@
import logging
from datetime import datetime, timezone, timedelta
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
from django.urls import reverse
from monitor.models import NotificationPreference, EmailNotification
logger = logging.getLogger(__name__)
class EmailService:
    """Service for sending Bitcoin monitor notifications.

    Every outgoing email follows the same pipeline:
    create an EmailNotification record ('pending') -> render template ->
    send via Django mail -> update the record's status ('sent'/'retrying'/
    'failed'). Recipients come from active NotificationPreference rows.
    """

    def __init__(self):
        # Both values fall back to development defaults when unset in settings.
        self.site_domain = getattr(settings, 'SITE_DOMAIN', 'localhost:8000')
        self.from_email = getattr(settings, 'DEFAULT_FROM_EMAIL', 'noreply@bitcoin-monitor.com')

    def get_notification_preferences(self):
        """Get all active notification preferences."""
        return NotificationPreference.objects.filter(is_active=True)

    def create_email_notification_record(self, recipient, subject, notification_type):
        """Create a record of the email to be sent.

        The record starts in 'pending' status; _send_single_email updates it.
        """
        return EmailNotification.objects.create(
            recipient=recipient,
            subject=subject,
            notification_type=notification_type,
            status='pending'
        )

    def update_notification_status(self, email_notification, status, error_message=''):
        """Update the status of an email notification and persist it.

        Also stamps sent_at when (and only when) the status becomes 'sent'.
        """
        email_notification.status = status
        email_notification.error_message = error_message
        if status == 'sent':
            email_notification.sent_at = datetime.now(timezone.utc)
        email_notification.save()

    def render_email_template(self, template_name, context):
        """Render email template with context.

        Returns (html_content, text_content); the text version is the HTML
        with tags stripped, used as the plain-text alternative.
        """
        # Add common context variables shared by all templates.
        # NOTE(review): URLs are built with plain http:// — confirm whether
        # production should use https.
        context.update({
            'dashboard_url': f'http://{self.site_domain}',
            'admin_url': f'http://{self.site_domain}/admin/',
            'unsubscribe_url': f'http://{self.site_domain}/api/notifications/unsubscribe/',
        })
        # Render HTML and plain text versions
        html_content = render_to_string(f'emails/{template_name}', context)
        text_content = strip_tags(html_content)
        return html_content, text_content

    def send_event_alert(self, analysis):
        """Send a dip/peak event alert email for a MarketAnalysis instance.

        Per-recipient failures are logged and do not abort the loop; any
        top-level error is logged and swallowed.
        """
        try:
            # Get recipients who opted into event alerts.
            preferences = self.get_notification_preferences().filter(
                receive_event_alerts=True
            )
            if not preferences.exists():
                logger.info("No recipients for event alerts")
                return
            # Absolute deviation of current price from the average, in percent.
            # NOTE(review): divides by average_price — confirm it can never be 0
            # (a ZeroDivisionError here would be swallowed by the outer except).
            percent_change = abs(
                (float(analysis.current_price) - float(analysis.average_price)) /
                float(analysis.average_price) * 100
            )
            # Prepare template context.
            context = {
                'alert_type': analysis.status,
                'current_price': float(analysis.current_price),
                'yearly_average': float(analysis.average_price),
                'threshold_percent': float(analysis.threshold_percent),
                'lower_threshold': float(analysis.lower_threshold),
                'upper_threshold': float(analysis.upper_threshold),
                'percent_change': percent_change,
                'detected_at': analysis.timestamp,
                'previous_status': 'neutral',  # Could be tracked
                'recommendation': self._get_event_recommendation(analysis),
            }
            # Send to each recipient; one failure does not stop the others.
            for pref in preferences:
                try:
                    email_notification = self.create_email_notification_record(
                        recipient=pref.email_address,
                        subject=f"🚨 Bitcoin {analysis.status.upper()} Alert: "
                        f"${float(analysis.current_price):.2f}",
                        notification_type='event'
                    )
                    self._send_single_email(
                        email_notification,
                        'event_alert.html',
                        context
                    )
                except Exception as e:
                    logger.error(f"Failed to send event alert to {pref.email_address}: {e}")
            # NOTE(review): counts all targeted recipients, including failed sends.
            logger.info(f"Event alert sent to {preferences.count()} recipients")
        except Exception as e:
            logger.error(f"Error sending event alert: {e}")

    def send_system_alert(self, alert_title, alert_message, severity='warning',
                          affected_component='system', error_code=None):
        """Send a system alert email to recipients opted into system alerts.

        severity feeds the troubleshooting-step list; error_code is optional
        template context.
        """
        try:
            # Get recipients who opted into system alerts.
            preferences = self.get_notification_preferences().filter(
                receive_system_alerts=True
            )
            if not preferences.exists():
                logger.info("No recipients for system alerts")
                return
            # Prepare template context.
            context = {
                'alert_title': alert_title,
                'alert_message': alert_message,
                'severity': severity,
                'affected_component': affected_component,
                'error_code': error_code,
                'occurred_at': datetime.now(timezone.utc),
                'troubleshooting_steps': self._get_troubleshooting_steps(severity),
            }
            # Send to each recipient; one failure does not stop the others.
            for pref in preferences:
                try:
                    email_notification = self.create_email_notification_record(
                        recipient=pref.email_address,
                        subject=f"⚠️ Bitcoin Monitor System Alert: {alert_title}",
                        notification_type='system'
                    )
                    self._send_single_email(
                        email_notification,
                        'system_alert.html',
                        context
                    )
                except Exception as e:
                    logger.error(f"Failed to send system alert to {pref.email_address}: {e}")
            logger.info(f"System alert sent to {preferences.count()} recipients")
        except Exception as e:
            logger.error(f"Error sending system alert: {e}")

    def send_daily_digest(self):
        """Send the daily digest email summarizing the last 24 hours.

        Gathers price stats, market status, event and uptime counts, then
        mails every recipient opted into the digest. On failure, attempts to
        send a system alert about the failure.
        """
        try:
            # Get recipients who opted into the daily digest.
            preferences = self.get_notification_preferences().filter(
                receive_daily_digest=True
            )
            if not preferences.exists():
                logger.info("No recipients for daily digest")
                return
            # Get data for the past 24 hours.
            yesterday = datetime.now(timezone.utc) - timedelta(hours=24)
            # Local imports avoid a circular import at module load time —
            # TODO confirm that is why they are deferred here.
            from monitor.models import BitcoinPrice, MarketAnalysis, SystemStatus
            from django.db.models import Max, Min
            # Price statistics over the window.
            prices = BitcoinPrice.objects.filter(
                timestamp__gte=yesterday
            )
            if prices.exists():
                current_price = prices.latest('timestamp').price_usd
                daily_high = prices.aggregate(Max('price_usd'))['price_usd__max']
                daily_low = prices.aggregate(Min('price_usd'))['price_usd__min']
                # NOTE(review): this is the high-to-low range percent, not the
                # net 24h change — confirm the template labels it accordingly.
                daily_change = ((float(daily_high) - float(daily_low)) / float(daily_low) * 100)
            else:
                current_price = daily_high = daily_low = 0
                daily_change = 0
            # Latest hourly market status (defaults to 'neutral' if none stored).
            latest_analysis = MarketAnalysis.objects.filter(
                period='hourly'
            ).order_by('-timestamp').first()
            market_status = latest_analysis.status if latest_analysis else 'neutral'
            # Number of dip/peak events in the window.
            events_count = MarketAnalysis.objects.filter(
                is_event=True,
                timestamp__gte=yesterday
            ).count()
            # System health stats: uptime is healthy checks / all checks.
            price_fetches = prices.count()
            successful_fetches = SystemStatus.objects.filter(
                is_healthy=True,
                timestamp__gte=yesterday
            ).count()
            total_checks = SystemStatus.objects.filter(
                timestamp__gte=yesterday
            ).count()
            uptime_percentage = (successful_fetches / total_checks * 100) if total_checks > 0 else 0
            # Up to 5 most recent events for the digest body.
            events_today = []
            for event in MarketAnalysis.objects.filter(
                is_event=True,
                timestamp__gte=yesterday
            ).order_by('-timestamp')[:5]:
                events_today.append({
                    'type': event.event_type,
                    'price': float(event.current_price),
                    'time': event.timestamp,
                })
            # Prepare template context.
            context = {
                'date': datetime.now(timezone.utc),
                'summary_period': 'Last 24 hours',
                'market_status': market_status,
                'current_price': float(current_price),
                'daily_high': float(daily_high) if daily_high else 0,
                'daily_low': float(daily_low) if daily_low else 0,
                'daily_change': daily_change,
                'events_count': events_count,
                'price_fetches': price_fetches,
                'uptime_percentage': uptime_percentage,
                'events_today': events_today,
                'market_insight': self._get_daily_market_insight(market_status),
            }
            # Send to each recipient; one failure does not stop the others.
            for pref in preferences:
                try:
                    email_notification = self.create_email_notification_record(
                        recipient=pref.email_address,
                        subject=f"📊 Bitcoin Daily Digest - {datetime.now(timezone.utc).strftime('%b %d, %Y')}",
                        notification_type='digest'
                    )
                    self._send_single_email(
                        email_notification,
                        'daily_digest.html',
                        context
                    )
                except Exception as e:
                    logger.error(f"Failed to send daily digest to {pref.email_address}: {e}")
            logger.info(f"Daily digest sent to {preferences.count()} recipients")
        except Exception as e:
            logger.error(f"Error sending daily digest: {e}")
            # Best effort: notify admins that the digest itself failed.
            self.send_system_alert(
                alert_title="Daily Digest Failed",
                alert_message=f"Failed to send daily digest: {str(e)}",
                severity='error',
                affected_component='email_service'
            )

    def send_test_email(self, recipient_email):
        """Send a test email to verify configuration.

        Returns:
            (True, success_message) or (False, error_string).
        """
        try:
            email_notification = self.create_email_notification_record(
                recipient=recipient_email,
                subject="✅ Bitcoin Monitor - Test Email",
                notification_type='test'
            )
            context = {
                'test_message': 'This is a test email from Bitcoin Monitor.',
                'timestamp': datetime.now(timezone.utc),
            }
            self._send_single_email(
                email_notification,
                'system_alert.html',  # Reuse system template for test
                context
            )
            return True, "Test email sent successfully"
        except Exception as e:
            logger.error(f"Error sending test email: {e}")
            return False, str(e)

    def _send_single_email(self, email_notification, template_name, context):
        """Send a single email and update the notification record.

        On success stores the rendered content and marks the record 'sent'.
        On failure increments retry_count, marks the record 'retrying' (< 3
        attempts) or 'failed', and re-raises so the caller can log it.
        NOTE(review): nothing in this class re-sends 'retrying' records —
        confirm an external task picks them up.
        """
        try:
            # Render both HTML and plain-text bodies.
            html_content, text_content = self.render_email_template(template_name, context)
            # Plain-text body with an HTML alternative part.
            email = EmailMultiAlternatives(
                subject=email_notification.subject,
                body=text_content,
                from_email=self.from_email,
                to=[email_notification.recipient],
            )
            email.attach_alternative(html_content, "text/html")
            # Send email
            email.send()
            # Persist the rendered content; the status update below saves it.
            email_notification.content_html = html_content
            email_notification.content_text = text_content
            self.update_notification_status(email_notification, 'sent')
            logger.info(f"Email sent to {email_notification.recipient}")
        except Exception as e:
            logger.error(f"Failed to send email to {email_notification.recipient}: {e}")
            # retry_count is saved by update_notification_status's save().
            email_notification.retry_count += 1
            if email_notification.retry_count < 3:
                self.update_notification_status(
                    email_notification,
                    'retrying',
                    f"Retry {email_notification.retry_count}/3: {str(e)}"
                )
            else:
                self.update_notification_status(
                    email_notification,
                    'failed',
                    f"Failed after 3 retries: {str(e)}"
                )
            raise

    def _get_event_recommendation(self, analysis):
        """Return a one-line recommendation string for the analysis status."""
        if analysis.status == 'dip':
            return "Price is significantly below yearly average. Consider buying opportunity."
        elif analysis.status == 'peak':
            return "Price is significantly above yearly average. Consider taking profits."
        else:
            return "Price has returned to normal range. Monitor for further changes."

    def _get_troubleshooting_steps(self, severity):
        """Return troubleshooting steps; 'critical' adds two extra steps."""
        steps = [
            "Check the admin panel for detailed error logs",
            "Verify internet connection and API access",
            "Check Redis and Celery worker status",
        ]
        if severity == 'critical':
            steps.append("Restart the monitoring services if necessary")
            steps.append("Check database connection and disk space")
        return steps

    def _get_daily_market_insight(self, market_status):
        """Return a canned insight for the market status, with a generic fallback."""
        insights = {
            'dip': "Market shows buying opportunity with prices below yearly average. "
                   "Monitor for further dips before entering.",
            'peak': "Market at elevated levels. Consider profit-taking strategies "
                    "and set stop-loss orders.",
            'neutral': "Market trading within normal range. Good time to review "
                       "portfolio and set alerts for future movements.",
        }
        return insights.get(market_status, "Monitor market for emerging trends.")

View File

@@ -0,0 +1,301 @@
import logging
import time
from datetime import datetime, timezone, timedelta
from typing import List, Dict, Optional
import requests
from django.db import transaction
from django.utils import timezone as django_timezone
from decimal import Decimal
from monitor.models import BitcoinPrice
logger = logging.getLogger(__name__)
class HistoricalDataFetcher:
    """Fetches historical Bitcoin price data from the CoinGecko API.

    Also provides helpers to persist the data into BitcoinPrice rows,
    generate synthetic development data, and audit data quality.
    """

    def __init__(self):
        self.base_url = "https://api.coingecko.com/api/v3"
        # A shared session reuses TCP connections and carries common headers.
        self.session = requests.Session()
        self.session.headers.update({
            'User-Agent': 'BitcoinMonitor/1.0',
            'Accept': 'application/json',
        })

    def fetch_historical_data(self, days: int = 365) -> List[Dict]:
        """
        Fetch historical Bitcoin data for specified number of days.

        Args:
            days: Number of days of historical data to fetch

        Returns:
            List of dicts with keys 'timestamp' (aware UTC datetime),
            'price_usd', 'volume', 'market_cap'; empty list on any error.
        """
        try:
            logger.info(f"Fetching {days} days of historical Bitcoin data...")
            url = f"{self.base_url}/coins/bitcoin/market_chart"
            params = {
                'vs_currency': 'usd',
                'days': days,
                'interval': 'daily',
            }
            response = self.session.get(url, params=params, timeout=30)
            response.raise_for_status()
            data = response.json()
            historical_data = []
            # CoinGecko returns [timestamp_ms, value] pairs.
            for price_point in data.get('prices', []):
                timestamp = datetime.fromtimestamp(price_point[0] / 1000, timezone.utc)
                historical_data.append({
                    'timestamp': timestamp,
                    'price_usd': price_point[1],
                    'volume': None,
                    'market_cap': None,
                })
            # Volume and market-cap arrays are parallel to prices; align by index.
            for i, (timestamp_ms, volume) in enumerate(data.get('total_volumes', [])):
                if i < len(historical_data):
                    historical_data[i]['volume'] = volume
            for i, (timestamp_ms, market_cap) in enumerate(data.get('market_caps', [])):
                if i < len(historical_data):
                    historical_data[i]['market_cap'] = market_cap
            logger.info(f"Fetched {len(historical_data)} historical price points")
            return historical_data
        except requests.exceptions.RequestException as e:
            logger.error(f"Request error fetching historical data: {e}")
            return []
        except Exception as e:
            logger.error(f"Error fetching historical data: {e}")
            return []

    def fetch_historical_data_range(self, start_date: datetime, end_date: datetime) -> List[Dict]:
        """
        Fetch historical data for a specific date range.

        Note: CoinGecko API doesn't support arbitrary date ranges directly,
        so we fetch more days than needed and filter locally.
        NOTE(review): fetched timestamps are timezone-aware UTC; passing
        naive start/end datetimes will raise on comparison — confirm callers.
        """
        days_difference = (end_date - start_date).days
        # Over-fetch by a margin to ensure the requested range is covered.
        all_data = self.fetch_historical_data(days=days_difference + 100)
        return [
            point for point in all_data
            if start_date <= point['timestamp'] <= end_date
        ]

    def save_historical_data(self, historical_data: List[Dict], clear_existing: bool = False) -> Dict:
        """
        Save historical data to database.

        Args:
            historical_data: List of price data dictionaries
            clear_existing: Whether to clear existing data before saving

        Returns:
            Dict with 'saved', 'skipped', 'errors' counts (plus 'total' when
            input is non-empty).
        """
        if not historical_data:
            logger.warning("No historical data to save")
            return {'saved': 0, 'skipped': 0, 'errors': 0}
        try:
            # One transaction: either the whole import lands or none of it.
            with transaction.atomic():
                if clear_existing:
                    deleted_count, _ = BitcoinPrice.objects.all().delete()
                    logger.info(f"Cleared {deleted_count} existing price records")
                saved_count = 0
                skipped_count = 0
                error_count = 0
                for data_point in historical_data:
                    try:
                        # Skip points whose timestamp is already stored.
                        exists = BitcoinPrice.objects.filter(
                            timestamp=data_point['timestamp']
                        ).exists()
                        if exists:
                            skipped_count += 1
                            continue
                        # Decimal(str(...)) avoids binary-float artifacts.
                        BitcoinPrice.objects.create(
                            timestamp=data_point['timestamp'],
                            price_usd=Decimal(str(data_point['price_usd'])),
                            volume=Decimal(str(data_point['volume'])) if data_point.get('volume') else None,
                            market_cap=Decimal(str(data_point['market_cap'])) if data_point.get('market_cap') else None,
                        )
                        saved_count += 1
                        # Log progress every 50 records.
                        if saved_count % 50 == 0:
                            logger.info(f"Saved {saved_count} historical records...")
                    except Exception as e:
                        error_count += 1
                        logger.error(f"Error saving data point {data_point.get('timestamp')}: {e}")
                logger.info(f"Historical data saved: {saved_count} new, {skipped_count} existing, {error_count} errors")
                return {
                    'saved': saved_count,
                    'skipped': skipped_count,
                    'errors': error_count,
                    'total': len(historical_data),
                }
        except Exception as e:
            logger.error(f"Transaction error saving historical data: {e}")
            return {'saved': 0, 'skipped': 0, 'errors': len(historical_data), 'total': len(historical_data)}

    def generate_test_data(self, days: int = 30, base_price: float = 45000) -> List[Dict]:
        """
        Generate synthetic test data for development.

        Args:
            days: Number of days of test data
            base_price: Base price for the data

        Returns:
            List of hourly synthetic price dicts in chronological order.
        """
        import random
        logger.info(f"Generating {days} days of synthetic test data...")
        test_data = []
        now = django_timezone.now()
        for i in range(days * 24):  # Generate hourly data
            timestamp = now - timedelta(hours=i)
            # Realistic price fluctuations (±5% around base_price).
            variation = random.uniform(0.95, 1.05)
            price = base_price * variation
            # Volume and market cap with some randomness.
            volume = random.uniform(20000000000, 40000000000)
            market_cap = random.uniform(800000000000, 900000000000)
            test_data.append({
                'timestamp': timestamp,
                'price_usd': round(price, 2),
                'volume': round(volume, 2),
                'market_cap': round(market_cap, 2),
            })
        # Points were generated newest-first; reverse to chronological order.
        test_data.reverse()
        logger.info(f"Generated {len(test_data)} synthetic data points")
        return test_data

    def analyze_historical_data_quality(self, historical_data: List[Dict]) -> Dict:
        """
        Analyze the quality of historical data.

        Args:
            historical_data: List of price data dictionaries

        Returns:
            Dict with total_points, date_range, price_stats, data_quality
            and suggestions; {'error': ...} when given no data.
        """
        if not historical_data:
            return {'error': 'No data to analyze'}
        # Sort by timestamp so gap detection walks chronologically.
        sorted_data = sorted(historical_data, key=lambda x: x['timestamp'])
        timestamps = [d['timestamp'] for d in sorted_data]
        prices = [d['price_usd'] for d in sorted_data]
        # Fix: compute stats only over non-None prices — a None price is a
        # tracked quality issue and previously crashed min()/sum() here.
        valid_prices = [p for p in prices if p is not None]
        if valid_prices:
            min_price = min(valid_prices)
            max_price = max(valid_prices)
            avg_price = sum(valid_prices) / len(valid_prices)
            # Guard division when the minimum price is zero.
            range_percent = ((max_price - min_price) / min_price * 100) if min_price else 0.0
        else:
            min_price = max_price = avg_price = None
            range_percent = 0.0
        # Flag gaps of more than one day between consecutive points.
        time_gaps = []
        for i in range(1, len(timestamps)):
            gap = (timestamps[i] - timestamps[i - 1]).total_seconds() / 3600  # hours
            if gap > 24:
                time_gaps.append({
                    'from': timestamps[i - 1],
                    'to': timestamps[i],
                    'gap_hours': gap,
                })
        # Count missing fields.
        missing_prices = sum(1 for d in sorted_data if d['price_usd'] is None)
        missing_volumes = sum(1 for d in sorted_data if d.get('volume') is None)
        missing_market_caps = sum(1 for d in sorted_data if d.get('market_cap') is None)
        return {
            'total_points': len(historical_data),
            'date_range': {
                'start': timestamps[0],
                'end': timestamps[-1],
                'days': (timestamps[-1] - timestamps[0]).days,
            },
            'price_stats': {
                'min': min_price,
                'max': max_price,
                'average': avg_price,
                'range_percent': range_percent,
            },
            'data_quality': {
                'missing_prices': missing_prices,
                'missing_volumes': missing_volumes,
                'missing_market_caps': missing_market_caps,
                'time_gaps': len(time_gaps),
                'time_gaps_details': time_gaps[:5],  # First 5 gaps
            },
            'suggestions': self._generate_data_quality_suggestions({
                'missing_prices': missing_prices,
                'time_gaps': len(time_gaps),
                'total_points': len(historical_data),
            })
        }

    def _generate_data_quality_suggestions(self, metrics: Dict) -> List[str]:
        """Generate suggestions based on data quality metrics."""
        suggestions = []
        if metrics['missing_prices'] > 0:
            suggestions.append(f"Found {metrics['missing_prices']} missing prices. Consider filling gaps.")
        if metrics['time_gaps'] > 0:
            suggestions.append(f"Found {metrics['time_gaps']} time gaps. Data may not be continuous.")
        if metrics['total_points'] < 30:
            suggestions.append("Less than 30 data points. Consider fetching more data.")
        if not suggestions:
            suggestions.append("Data quality looks good!")
        return suggestions