first commit
This commit is contained in:
156
monitor/management/commands/check_data_quality.py
Normal file
156
monitor/management/commands/check_data_quality.py
Normal file
@@ -0,0 +1,156 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from monitor.services.historical_data import HistoricalDataFetcher
|
||||
from monitor.models import BitcoinPrice
|
||||
from django.db.models import Min, Max, Count
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Report quality statistics for the BitcoinPrice data in the database.

    Prints record counts, date coverage, missing-field percentages, price
    statistics, and detected time gaps; optionally fetches a fresh 30-day
    sample from the API for comparison.
    """

    help = 'Check quality and statistics of Bitcoin price data in database'

    def add_arguments(self, parser):
        parser.add_argument(
            '--fetch-sample',
            action='store_true',
            help='Fetch sample data for comparison'
        )

    def handle(self, *args, **options):
        self.stdout.write(self.style.HTTP_INFO("Bitcoin Data Quality Check"))
        self.stdout.write("=" * 50)

        # Get database statistics
        total_records = BitcoinPrice.objects.count()

        if total_records == 0:
            self.stdout.write(self.style.ERROR("No Bitcoin price data in database."))
            self.stdout.write("Run: python manage.py load_historical_data")
            return

        # Get date range
        date_range = BitcoinPrice.objects.aggregate(
            earliest=Min('timestamp'),
            latest=Max('timestamp')
        )

        # Default to 0 so the per-day check below can never hit an
        # unbound variable if timestamps are unexpectedly missing.
        days_span = 0

        # Calculate time span
        if date_range['earliest'] and date_range['latest']:
            time_span = date_range['latest'] - date_range['earliest']
            days_span = time_span.days

            self.stdout.write(f"Data range: {date_range['earliest'].strftime('%Y-%m-%d')} "
                              f"to {date_range['latest'].strftime('%Y-%m-%d')}")
            self.stdout.write(f"Time span: {days_span} days")

        self.stdout.write(f"Total records: {total_records}")

        # Calculate records per day
        if days_span > 0:
            records_per_day = total_records / days_span
            self.stdout.write(f"Records per day: {records_per_day:.2f}")

            if records_per_day < 0.9:
                self.stdout.write(
                    self.style.WARNING("⚠️ Less than 1 record per day - data may be incomplete")
                )
            elif records_per_day > 24:
                self.stdout.write("📈 More than hourly data - good coverage")
            else:
                self.stdout.write("📊 Daily data coverage")

        # Check for missing values
        missing_volume = BitcoinPrice.objects.filter(volume__isnull=True).count()
        missing_market_cap = BitcoinPrice.objects.filter(market_cap__isnull=True).count()

        if missing_volume > 0:
            self.stdout.write(
                self.style.WARNING(f"Missing volume data: {missing_volume} records ({missing_volume/total_records*100:.1f}%)")
            )

        if missing_market_cap > 0:
            self.stdout.write(
                self.style.WARNING(f"Missing market cap: {missing_market_cap} records ({missing_market_cap/total_records*100:.1f}%)")
            )

        # Get price statistics (single ordered queryset, reused for gap scan)
        prices = BitcoinPrice.objects.all().order_by('timestamp')
        price_list = [float(p.price_usd) for p in prices]

        if price_list:
            min_price = min(price_list)
            max_price = max(price_list)
            avg_price = sum(price_list) / len(price_list)

            self.stdout.write("\n" + self.style.SUCCESS("Price Statistics"))
            self.stdout.write("-" * 30)
            self.stdout.write(f"Minimum price: ${min_price:.2f}")
            self.stdout.write(f"Maximum price: ${max_price:.2f}")
            self.stdout.write(f"Average price: ${avg_price:.2f}")
            self.stdout.write(f"Price range: ${max_price - min_price:.2f} "
                              f"({((max_price - min_price) / min_price * 100):.1f}%)")

        # Check for time gaps; `prices` is already ordered by timestamp,
        # so re-ordering here would only issue a redundant query.
        time_gaps = []
        prev_timestamp = None

        for price in prices:
            if prev_timestamp:
                gap_hours = (price.timestamp - prev_timestamp).total_seconds() / 3600
                if gap_hours > 24:  # More than 1 day gap
                    time_gaps.append({
                        'from': prev_timestamp,
                        'to': price.timestamp,
                        'gap_days': gap_hours / 24,
                    })
            prev_timestamp = price.timestamp

        if time_gaps:
            self.stdout.write("\n" + self.style.WARNING("Time Gaps Detected"))
            self.stdout.write("-" * 30)
            for gap in time_gaps[:3]:  # Show first 3 gaps
                self.stdout.write(
                    f"Gap of {gap['gap_days']:.1f} days from "
                    f"{gap['from'].strftime('%Y-%m-%d')} to {gap['to'].strftime('%Y-%m-%d')}"
                )

            if len(time_gaps) > 3:
                self.stdout.write(f"... and {len(time_gaps) - 3} more gaps")

        # Compare with fresh data if requested.
        # NOTE: Django's style object has no INFO role; HTTP_INFO is the
        # closest supported role (style.INFO raised AttributeError).
        if options['fetch_sample']:
            self.stdout.write("\n" + self.style.HTTP_INFO("Fetching sample data for comparison..."))

            fetcher = HistoricalDataFetcher()
            sample_data = fetcher.fetch_historical_data(days=30)

            if sample_data:
                self.stdout.write(f"Sample data points: {len(sample_data)}")

                sample_prices = [d['price_usd'] for d in sample_data]
                sample_min = min(sample_prices)
                sample_max = max(sample_prices)
                sample_avg = sum(sample_prices) / len(sample_prices)

                self.stdout.write(f"Sample min: ${sample_min:.2f}")
                self.stdout.write(f"Sample max: ${sample_max:.2f}")
                self.stdout.write(f"Sample avg: ${sample_avg:.2f}")

        # Recommendations
        self.stdout.write("\n" + self.style.HTTP_INFO("Recommendations"))
        self.stdout.write("-" * 30)

        if total_records < 100:
            self.stdout.write("1. Load more data: python manage.py load_historical_data --days 365")

        if missing_volume > total_records * 0.5:
            self.stdout.write("2. Consider fetching data with volume information")

        if time_gaps:
            self.stdout.write("3. Consider filling time gaps with additional data")

        if total_records >= 100 and not time_gaps and missing_volume < total_records * 0.1:
            self.stdout.write("✅ Data quality looks good!")

        self.stdout.write("\n" + self.style.SUCCESS("Quality check complete!"))
|
||||
211
monitor/management/commands/load_historical_data.py
Normal file
211
monitor/management/commands/load_historical_data.py
Normal file
@@ -0,0 +1,211 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from monitor.services.historical_data import HistoricalDataFetcher
|
||||
from monitor.services.analyzer import MarketAnalyzer
|
||||
from monitor.services.email_service import EmailService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Load historical Bitcoin price data into the database.

    Fetches (or synthesizes) price history, optionally analyzes quality,
    saves it via HistoricalDataFetcher, and can run market analysis and
    send an email notification once loading completes.
    """

    help = 'Load historical Bitcoin price data into the database'

    def add_arguments(self, parser):
        parser.add_argument(
            '--days',
            type=int,
            default=365,
            help='Number of days of historical data to fetch (default: 365)'
        )
        parser.add_argument(
            '--clear',
            action='store_true',
            help='Clear existing data before loading new data'
        )
        parser.add_argument(
            '--generate-test',
            action='store_true',
            help='Generate synthetic test data instead of fetching real data'
        )
        parser.add_argument(
            '--test-days',
            type=int,
            default=30,
            help='Days of test data to generate (default: 30)'
        )
        parser.add_argument(
            '--analyze',
            action='store_true',
            help='Run market analysis on historical data after loading'
        )
        parser.add_argument(
            '--notify',
            action='store_true',
            help='Send email notification when data loading is complete'
        )
        parser.add_argument(
            '--quality-check',
            action='store_true',
            help='Perform data quality analysis'
        )

    def handle(self, *args, **options):
        self.stdout.write(self.style.HTTP_INFO("Bitcoin Historical Data Loader"))
        self.stdout.write("=" * 50)

        days = options['days']
        clear_existing = options['clear']
        generate_test = options['generate_test']
        test_days = options['test_days']
        analyze = options['analyze']
        notify = options['notify']
        quality_check = options['quality_check']

        fetcher = HistoricalDataFetcher()

        if generate_test:
            self.stdout.write(
                self.style.WARNING(
                    f"Generating {test_days} days of synthetic test data..."
                )
            )
            historical_data = fetcher.generate_test_data(days=test_days)
        else:
            self.stdout.write(
                (f"Fetching {days} days of historical Bitcoin data...")
            )
            historical_data = fetcher.fetch_historical_data(days=days)

        if not historical_data:
            self.stdout.write(
                self.style.ERROR("No data fetched. Check your internet connection and API status.")
            )
            return

        # Perform quality check if requested.
        # NOTE: Django's style object has no INFO role; HTTP_INFO is the
        # closest supported role (style.INFO raised AttributeError).
        if quality_check:
            self.stdout.write(self.style.HTTP_INFO("Performing data quality analysis..."))
            quality_metrics = fetcher.analyze_historical_data_quality(historical_data)

            self.stdout.write(f"Total data points: {quality_metrics['total_points']}")
            self.stdout.write(
                f"Date range: {quality_metrics['date_range']['start'].strftime('%Y-%m-%d')} "
                f"to {quality_metrics['date_range']['end'].strftime('%Y-%m-%d')} "
                f"({quality_metrics['date_range']['days']} days)"
            )

            price_stats = quality_metrics['price_stats']
            self.stdout.write(f"Price range: ${price_stats['min']:.2f} - ${price_stats['max']:.2f}")
            self.stdout.write(f"Average price: ${price_stats['average']:.2f}")

            if quality_metrics['data_quality']['missing_prices'] > 0:
                self.stdout.write(
                    self.style.WARNING(
                        f"Missing prices: {quality_metrics['data_quality']['missing_prices']}"
                    )
                )

            if quality_metrics['data_quality']['time_gaps'] > 0:
                self.stdout.write(
                    self.style.WARNING(
                        f"Time gaps found: {quality_metrics['data_quality']['time_gaps']}"
                    )
                )

            for suggestion in quality_metrics.get('suggestions', []):
                self.stdout.write(f"💡 {suggestion}")

        # Save data to database
        self.stdout.write(("Saving data to database..."))

        save_stats = fetcher.save_historical_data(
            historical_data=historical_data,
            clear_existing=clear_existing
        )

        # Display save statistics
        self.stdout.write("\n" + self.style.SUCCESS("Data Load Summary"))
        self.stdout.write("-" * 30)
        self.stdout.write(f"Total records processed: {save_stats['total']}")
        self.stdout.write(f"New records saved: {save_stats['saved']}")
        self.stdout.write(f"Existing records skipped: {save_stats['skipped']}")
        self.stdout.write(f"Errors: {save_stats['errors']}")

        if save_stats['errors'] > 0:
            self.stdout.write(
                self.style.WARNING(f"⚠️ {save_stats['errors']} records had errors and were not saved")
            )

        # Run analysis if requested
        if analyze and save_stats['saved'] > 0:
            self.stdout.write("\n" + ("Running market analysis on historical data..."))

            analyzer = MarketAnalyzer()
            analysis_count = 0

            # Run analysis for different time periods
            for period in ['hourly', 'daily', 'weekly', 'yearly']:
                analysis = analyzer.analyze_market(period)
                if analysis:
                    analysis_count += 1
                    self.stdout.write(
                        f" {period.capitalize()} analysis: {analysis.status} at ${analysis.current_price}"
                    )

            self.stdout.write(
                self.style.SUCCESS(f"Completed {analysis_count} market analyses")
            )

        # Send notification if requested
        if notify:
            self.stdout.write("\n" + self.style.HTTP_INFO("Sending completion notification..."))

            try:
                email_service = EmailService()

                # send_system_alert builds its own subject from alert_title,
                # so no separate subject string is needed here.
                email_service.send_system_alert(
                    alert_title="Historical Data Load Complete",
                    alert_message=(
                        f"Successfully loaded {save_stats['saved']} historical Bitcoin price records.\n"
                        f"Date range: {days} days\n"
                        f"Errors: {save_stats['errors']}\n"
                        # save_stats['total'] is the processed count, not the
                        # database total — label it accordingly.
                        f"Total records processed: {save_stats['total']}"
                    ),
                    severity='info',
                    affected_component='data_loader'
                )

                self.stdout.write(self.style.SUCCESS("Notification sent!"))

            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(f"Failed to send notification: {e}")
                )

        # Display database stats
        from monitor.models import BitcoinPrice
        total_records = BitcoinPrice.objects.count()
        latest_record = BitcoinPrice.objects.order_by('-timestamp').first()

        self.stdout.write("\n" + self.style.SUCCESS("Database Status"))
        self.stdout.write("-" * 30)
        self.stdout.write(f"Total Bitcoin price records: {total_records}")

        if latest_record:
            self.stdout.write(
                f"Latest price: ${latest_record.price_usd} "
                f"at {latest_record.timestamp.strftime('%Y-%m-%d %H:%M UTC')}"
            )

        self.stdout.write("\n" + self.style.SUCCESS("✅ Historical data loading complete!"))

        # Provide next steps
        self.stdout.write("\n" + self.style.HTTP_INFO("Next Steps:"))
        self.stdout.write("1. View data in admin: http://localhost:8000/admin/monitor/bitcoinprice/")
        self.stdout.write("2. Run analysis: python manage.py load_historical_data --analyze")
        self.stdout.write("3. View dashboard: http://localhost:8000/")
        self.stdout.write("4. Test email notifications by running an analysis")
|
||||
33
monitor/management/commands/load_sample_data.py
Normal file
33
monitor/management/commands/load_sample_data.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from monitor.models import BitcoinPrice
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import random
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Replace all stored prices with 7 days of randomized hourly samples."""

    help = 'Load sample Bitcoin price data'

    def handle(self, *args, **options):
        # Start from a clean slate so counts reflect only sample data.
        BitcoinPrice.objects.all().delete()

        base_price = 45000
        anchor = datetime.now(timezone.utc)

        # One record per hour covering the previous 7 days (7 * 24 = 168),
        # each jittered by up to ±5% around the base price.
        for hours_back in range(168):
            jitter = random.uniform(0.95, 1.05)
            BitcoinPrice.objects.create(
                timestamp=anchor - timedelta(hours=hours_back),
                price_usd=round(base_price * jitter, 2),
                volume=random.uniform(20000000000, 40000000000),
                market_cap=random.uniform(800000000000, 900000000000),
            )

        self.stdout.write(
            self.style.SUCCESS(f'Successfully created {BitcoinPrice.objects.count()} sample records')
        )
|
||||
28
monitor/management/commands/send_test_email.py
Normal file
28
monitor/management/commands/send_test_email.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from monitor.services.email_service import EmailService
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Send a one-off test message to confirm email configuration works."""

    help = 'Send a test email to verify configuration'

    def add_arguments(self, parser):
        parser.add_argument(
            '--email',
            type=str,
            required=True,
            help='Email address to send test to'
        )

    def handle(self, *args, **options):
        recipient = options['email']
        service = EmailService()

        self.stdout.write(f'Sending test email to {recipient}...')

        ok, detail = service.send_test_email(recipient)

        # Guard clause: report the failure and stop.
        if not ok:
            self.stdout.write(self.style.ERROR(f'Failed to send test email: {detail}'))
            return

        self.stdout.write(self.style.SUCCESS('Test email sent successfully!'))
        self.stdout.write('Check your inbox (and spam folder).')
|
||||
43
monitor/management/commands/setup_notifications.py
Normal file
43
monitor/management/commands/setup_notifications.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from monitor.models import NotificationPreference
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Create default NotificationPreference rows for a list of addresses."""

    help = 'Setup initial notification preferences'

    def add_arguments(self, parser):
        parser.add_argument(
            '--emails',
            nargs='+',
            type=str,
            default=['ali.c.zeybek@gmail.com', 'alican@alicanzeybek.xyz'],
            help='Email addresses to setup notifications for'
        )

    def handle(self, *args, **options):
        addresses = options['emails']

        # Flags applied only when a preference row is newly created;
        # existing rows are left untouched.
        initial_flags = {
            'receive_event_alerts': True,
            'receive_system_alerts': True,
            'receive_daily_digest': True,
            'is_active': True,
        }

        for address in addresses:
            _, was_created = NotificationPreference.objects.get_or_create(
                email_address=address,
                defaults=initial_flags,
            )

            if was_created:
                line = self.style.SUCCESS(f'Created notification preference for {address}')
            else:
                line = self.style.WARNING(f'Notification preference for {address} already exists')
            self.stdout.write(line)

        self.stdout.write(
            self.style.SUCCESS(f'Setup complete for {len(addresses)} email(s)')
        )
|
||||
Reference in New Issue
Block a user