"""
BallWatch Basketball Analytics Platform
=======================================
System Administration Blueprint

System monitoring, data management, error handling, and maintenance
operations for data engineers and system administrators.

Author: StatPadders Team
Course: CS 3200 - Summer 2 2025
"""

from flask import Blueprint, request, jsonify, make_response, current_app
from backend.db_connection import db
from datetime import datetime, timedelta
import json

# Create the Admin Blueprint
admin = Blueprint('admin', __name__)

# Tables that bulk/validation endpoints are allowed to name in SQL.
# SECURITY: identifiers cannot be bound as query parameters, so any
# request-supplied table name must be checked against this whitelist
# before being interpolated into a statement.
_VALIDATABLE_TABLES = {
    'Players', 'Teams', 'Game', 'Users', 'PlayerGameStats',
    'DataLoads', 'ErrorLogs', 'ValidationReports',
}


# ============================================================================
# SYSTEM HEALTH & MONITORING ROUTES
# ============================================================================

@admin.route('/health', methods=['GET'])
def get_system_health():
    """
    Get comprehensive system health status including database connectivity,
    recent errors, and performance metrics.

    User Stories: [Mike-2.5]
    """
    try:
        current_app.logger.info('GET /system/health - Checking system health status')

        cursor = db.get_db().cursor()

        # Verify database connectivity with a trivial round-trip.
        cursor.execute('SELECT 1')
        db_status = 'healthy' if cursor.fetchone() else 'unhealthy'

        # Recent error count (last 24 hours).
        cursor.execute('''
            SELECT COUNT(*) as error_count
            FROM ErrorLogs
            WHERE created_at >= DATE_SUB(NOW(), INTERVAL 24 HOUR)
        ''')
        recent_errors_result = cursor.fetchone()
        recent_errors = recent_errors_result['error_count'] if recent_errors_result else 0

        # Data loads currently in flight.
        cursor.execute('''
            SELECT COUNT(*) as active_loads
            FROM DataLoads
            WHERE status IN ('running', 'pending')
        ''')
        active_loads_result = cursor.fetchone()
        active_loads = active_loads_result['active_loads'] if active_loads_result else 0

        # Most recent successful data load.
        cursor.execute('''
            SELECT load_id, load_type, completed_at
            FROM DataLoads
            WHERE status = 'completed'
            ORDER BY completed_at DESC
            LIMIT 1
        ''')
        last_successful_load = cursor.fetchone()

        # High-level row counts for the dashboard.
        cursor.execute('''
            SELECT
                (SELECT COUNT(*) FROM Players) as total_players,
                (SELECT COUNT(*) FROM Teams) as total_teams,
                (SELECT COUNT(*) FROM Game) as total_games,
                (SELECT COUNT(*) FROM Users) as total_users
        ''')
        system_metrics = cursor.fetchone()

        # Derive overall status; thresholds (10 errors / 5 loads) are the
        # platform's alerting heuristics, most severe condition wins.
        overall_status = 'operational'
        if db_status != 'healthy':
            overall_status = 'critical'
        elif recent_errors > 10:
            overall_status = 'degraded'
        elif active_loads > 5:
            overall_status = 'warning'

        response_data = {
            'overall_status': overall_status,
            'database_status': db_status,
            'recent_errors_24h': recent_errors,
            'active_data_loads': active_loads,
            'last_successful_load': last_successful_load,
            'system_metrics': system_metrics,
            'health_check_timestamp': datetime.now().isoformat()
        }

        return make_response(jsonify(response_data), 200)

    except Exception as e:
        current_app.logger.error(f'Error checking system health: {e}')
        return make_response(jsonify({
            "overall_status": "error",
            "error": "Failed to check system health",
            "timestamp": datetime.now().isoformat()
        }), 500)


# ============================================================================
# DATA LOAD MANAGEMENT ROUTES
# ============================================================================

@admin.route('/data-loads', methods=['GET'])
def get_data_loads():
    """
    Get comprehensive history of data loads with filtering options.

    Query Parameters:
        status: Filter by status (pending, running, completed, failed)
        load_type: Filter by type (player_stats, team_data, game_data, etc.)
        days: Number of days to look back (default: 30)

    User Stories: [Mike-2.5]
    """
    try:
        current_app.logger.info('GET /system/data-loads - Fetching data load history')

        status = request.args.get('status')
        load_type = request.args.get('load_type')
        days = request.args.get('days', 30, type=int)

        cursor = db.get_db().cursor()

        # IFNULL(completed_at, NOW()) so in-flight loads report a live duration.
        query = '''
            SELECT
                load_id,
                load_type,
                status,
                started_at,
                completed_at,
                records_processed,
                records_failed,
                error_message,
                initiated_by,
                source_file,
                TIMESTAMPDIFF(SECOND, started_at, IFNULL(completed_at, NOW())) as duration_seconds
            FROM DataLoads
            WHERE started_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
        '''

        params = [days]

        if status:
            query += ' AND status = %s'
            params.append(status)
        if load_type:
            query += ' AND load_type = %s'
            params.append(load_type)

        query += ' ORDER BY started_at DESC'

        cursor.execute(query, params)
        loads_data = cursor.fetchall()

        # Per-status summary over the same window.
        cursor.execute('''
            SELECT
                status,
                COUNT(*) as count,
                SUM(records_processed) as total_records_processed,
                SUM(records_failed) as total_records_failed
            FROM DataLoads
            WHERE started_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
            GROUP BY status
        ''', (days,))

        status_summary = cursor.fetchall()

        response_data = {
            'loads': loads_data,
            'total_loads': len(loads_data),
            'status_summary': status_summary,
            'analysis_period_days': days
        }

        return make_response(jsonify(response_data), 200)

    except Exception as e:
        current_app.logger.error(f'Error fetching data loads: {e}')
        return make_response(jsonify({"error": "Failed to fetch data loads"}), 500)


@admin.route('/data-loads', methods=['POST'])
def start_data_load():
    """
    Start a new data load process.

    Expected JSON Body:
        {
            "load_type": "string" (required),
            "source_file": "string",
            "initiated_by": "string" (required)
        }

    User Stories: [Mike-2.1]
    """
    try:
        current_app.logger.info('POST /system/data-loads - Starting new data load')

        load_data = request.get_json(silent=True)

        # Guard against a missing/invalid JSON body: without this check the
        # membership tests below raise TypeError and surface as a 500.
        if not load_data:
            return make_response(jsonify({"error": "Request body must be JSON"}), 400)

        # Validate required fields
        required_fields = ['load_type', 'initiated_by']
        for field in required_fields:
            if field not in load_data:
                return make_response(jsonify({"error": f"Missing required field: {field}"}), 400)

        cursor = db.get_db().cursor()

        # Reject a duplicate run of the same load type while one is active.
        cursor.execute('''
            SELECT load_id FROM DataLoads
            WHERE load_type = %s AND status = 'running'
        ''', (load_data['load_type'],))

        if cursor.fetchone():
            return make_response(jsonify({
                "error": "A load of this type is already running"
            }), 409)

        # Insert new data load in the 'pending' state.
        query = '''
            INSERT INTO DataLoads (
                load_type, status, started_at, source_file, initiated_by
            ) VALUES (%s, 'pending', NOW(), %s, %s)
        '''

        values = (
            load_data['load_type'],
            load_data.get('source_file'),
            load_data['initiated_by']
        )

        cursor.execute(query, values)
        db.get_db().commit()

        new_load_id = cursor.lastrowid

        return make_response(jsonify({
            "message": "Data load initiated successfully",
            "load_id": new_load_id,
            "load_type": load_data['load_type'],
            "status": "pending"
        }), 201)

    except Exception as e:
        current_app.logger.error(f'Error starting data load: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to start data load"}), 500)


# FIX: the route previously read '/data-loads/' with no URL converter even
# though the view function takes load_id, so Flask could never bind the
# argument. '<int:load_id>' restores the intended parameterized route.
@admin.route('/data-loads/<int:load_id>', methods=['PUT'])
def update_data_load(load_id):
    """
    Update the status and metrics of a data load.

    Expected JSON Body:
        {
            "status": "string" (pending, running, completed, failed),
            "records_processed": int,
            "records_failed": int,
            "error_message": "string"
        }

    User Stories: [Mike-2.1]
    """
    try:
        current_app.logger.info(f'PUT /system/data-loads/{load_id} - Updating data load status')

        update_data = request.get_json(silent=True)

        if not update_data:
            return make_response(jsonify({"error": "No update data provided"}), 400)

        # Validate status if provided
        if 'status' in update_data:
            valid_statuses = ['pending', 'running', 'completed', 'failed']
            if update_data['status'] not in valid_statuses:
                return make_response(jsonify({
                    "error": f"Invalid status. Must be one of: {valid_statuses}"
                }), 400)

        cursor = db.get_db().cursor()

        # Verify load exists
        cursor.execute('SELECT load_id FROM DataLoads WHERE load_id = %s', (load_id,))
        if not cursor.fetchone():
            return make_response(jsonify({"error": "Data load not found"}), 404)

        # Build dynamic update query from whichever fields were supplied.
        update_fields = []
        values = []

        if 'status' in update_data:
            update_fields.append('status = %s')
            values.append(update_data['status'])

            # Stamp completed_at when the load reaches a terminal state.
            if update_data['status'] in ['completed', 'failed']:
                update_fields.append('completed_at = NOW()')

        for field in ['records_processed', 'records_failed', 'error_message']:
            if field in update_data:
                update_fields.append(f'{field} = %s')
                values.append(update_data[field])

        if update_fields:
            query = f"UPDATE DataLoads SET {', '.join(update_fields)} WHERE load_id = %s"
            values.append(load_id)
            cursor.execute(query, values)
            db.get_db().commit()

        return make_response(jsonify({
            "message": "Data load updated successfully",
            "load_id": load_id,
            "updated_fields": list(update_data.keys())
        }), 200)

    except Exception as e:
        current_app.logger.error(f'Error updating data load: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to update data load"}), 500)


# ============================================================================
# ERROR LOGGING & MANAGEMENT ROUTES
# ============================================================================

@admin.route('/error-logs', methods=['GET'])
def get_error_logs():
    """
    Get error log history with comprehensive filtering options.

    Query Parameters:
        severity: Filter by severity (info, warning, error, critical)
        module: Filter by module/component
        resolved: Filter by resolved status (true/false)
        days: Number of days to look back (default: 7)

    User Stories: [Mike-2.4, Mike-2.5]
    """
    try:
        current_app.logger.info('GET /system/error-logs - Fetching error log history')

        severity = request.args.get('severity')
        module = request.args.get('module')
        resolved = request.args.get('resolved')
        days = request.args.get('days', 7, type=int)

        cursor = db.get_db().cursor()

        query = '''
            SELECT
                error_id,
                error_type,
                severity,
                module,
                error_message,
                stack_trace,
                user_id,
                created_at,
                resolved_at,
                resolved_by,
                resolution_notes
            FROM ErrorLogs
            WHERE created_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
        '''

        params = [days]

        if severity:
            query += ' AND severity = %s'
            params.append(severity)
        if module:
            query += ' AND module = %s'
            params.append(module)
        # "resolved" is modelled by the presence of resolved_at, not a flag.
        if resolved is not None:
            if resolved.lower() == 'true':
                query += ' AND resolved_at IS NOT NULL'
            else:
                query += ' AND resolved_at IS NULL'

        query += ' ORDER BY created_at DESC'

        cursor.execute(query, params)
        error_logs = cursor.fetchall()

        # Severity breakdown, ordered most severe first.
        cursor.execute('''
            SELECT
                severity,
                COUNT(*) as count,
                SUM(CASE WHEN resolved_at IS NOT NULL THEN 1 ELSE 0 END) as resolved_count
            FROM ErrorLogs
            WHERE created_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
            GROUP BY severity
            ORDER BY
                CASE severity
                    WHEN 'critical' THEN 1
                    WHEN 'error' THEN 2
                    WHEN 'warning' THEN 3
                    WHEN 'info' THEN 4
                END
        ''', (days,))

        severity_summary = cursor.fetchall()

        response_data = {
            'error_logs': error_logs,
            'total_errors': len(error_logs),
            'severity_breakdown': severity_summary,
            'analysis_period_days': days
        }

        return make_response(jsonify(response_data), 200)

    except Exception as e:
        current_app.logger.error(f'Error fetching error logs: {e}')
        return make_response(jsonify({"error": "Failed to fetch error logs"}), 500)


@admin.route('/error-logs', methods=['POST'])
def log_error():
    """
    Log a new error in the system.

    Expected JSON Body:
        {
            "error_type": "string" (required),
            "severity": "string" (info, warning, error, critical) (required),
            "module": "string" (required),
            "error_message": "string" (required),
            "stack_trace": "string",
            "user_id": int
        }

    User Stories: [Mike-2.3]
    """
    try:
        current_app.logger.info('POST /system/error-logs - Logging new system error')

        error_data = request.get_json(silent=True)

        # Guard against a missing/invalid JSON body (would otherwise 500).
        if not error_data:
            return make_response(jsonify({"error": "Request body must be JSON"}), 400)

        # Validate required fields
        required_fields = ['error_type', 'severity', 'module', 'error_message']
        for field in required_fields:
            if field not in error_data:
                return make_response(jsonify({"error": f"Missing required field: {field}"}), 400)

        # Validate severity level
        valid_severities = ['info', 'warning', 'error', 'critical']
        if error_data['severity'] not in valid_severities:
            return make_response(jsonify({
                "error": f"Invalid severity. Must be one of: {valid_severities}"
            }), 400)

        cursor = db.get_db().cursor()

        query = '''
            INSERT INTO ErrorLogs (
                error_type, severity, module, error_message,
                stack_trace, user_id, created_at
            ) VALUES (%s, %s, %s, %s, %s, %s, NOW())
        '''

        values = (
            error_data['error_type'],
            error_data['severity'],
            error_data['module'],
            error_data['error_message'],
            error_data.get('stack_trace'),
            error_data.get('user_id')
        )

        cursor.execute(query, values)
        db.get_db().commit()

        new_error_id = cursor.lastrowid

        return make_response(jsonify({
            "message": "Error logged successfully",
            "error_id": new_error_id,
            "severity": error_data['severity']
        }), 201)

    except Exception as e:
        current_app.logger.error(f'Error logging system error: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to log error"}), 500)


# ============================================================================
# DATA VALIDATION & QUALITY ROUTES
# ============================================================================

@admin.route('/data-errors', methods=['GET'])
def get_data_errors():
    """
    Get details about data validation errors and integrity issues.

    Query Parameters:
        error_type: Filter by error type (duplicate, missing, invalid)
        table_name: Filter by affected table
        days: Number of days to look back (default: 7)

    User Stories: [Mike-2.4]
    """
    try:
        current_app.logger.info('GET /system/data-errors - Fetching data validation errors')

        error_type = request.args.get('error_type')
        table_name = request.args.get('table_name')
        days = request.args.get('days', 7, type=int)

        cursor = db.get_db().cursor()

        query = '''
            SELECT
                data_error_id,
                error_type,
                table_name,
                record_id,
                field_name,
                invalid_value,
                expected_format,
                detected_at,
                resolved_at,
                auto_fixed
            FROM DataErrors
            WHERE detected_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
        '''

        params = [days]

        if error_type:
            query += ' AND error_type = %s'
            params.append(error_type)
        if table_name:
            query += ' AND table_name = %s'
            params.append(table_name)

        query += ' ORDER BY detected_at DESC'

        cursor.execute(query, params)
        data_errors = cursor.fetchall()

        # Summary by (type, table), most frequent first.
        cursor.execute('''
            SELECT
                error_type,
                table_name,
                COUNT(*) as count,
                SUM(CASE WHEN resolved_at IS NOT NULL THEN 1 ELSE 0 END) as resolved_count
            FROM DataErrors
            WHERE detected_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
            GROUP BY error_type, table_name
            ORDER BY count DESC
        ''', (days,))

        error_summary = cursor.fetchall()

        response_data = {
            'errors': data_errors,
            'total_errors': len(data_errors),
            'error_breakdown': error_summary,
            'analysis_period_days': days
        }

        return make_response(jsonify(response_data), 200)

    except Exception as e:
        current_app.logger.error(f'Error fetching data errors: {e}')
        return make_response(jsonify({"error": "Failed to fetch data errors"}), 500)


@admin.route('/data-cleanup', methods=['GET'])
def get_cleanup_schedule():
    """
    Get the current data cleanup schedule and execution history.

    User Stories: [Mike-2.6]
    """
    try:
        current_app.logger.info('GET /system/data-cleanup - Fetching cleanup schedules')

        cursor = db.get_db().cursor()

        # Active cleanup schedules, soonest run first.
        cursor.execute('''
            SELECT
                schedule_id,
                cleanup_type,
                frequency,
                next_run,
                last_run,
                retention_days,
                is_active,
                created_by,
                created_at
            FROM CleanupSchedule
            WHERE is_active = 1
            ORDER BY next_run
        ''')

        active_schedules = cursor.fetchall()

        # Last 30 days of executions, capped at 20 rows.
        cursor.execute('''
            SELECT
                history_id,
                schedule_id,
                cleanup_type,
                started_at,
                completed_at,
                records_deleted,
                status,
                error_message
            FROM CleanupHistory
            WHERE started_at >= DATE_SUB(NOW(), INTERVAL 30 DAY)
            ORDER BY started_at DESC
            LIMIT 20
        ''')

        cleanup_history = cursor.fetchall()

        response_data = {
            'active_schedules': active_schedules,
            'recent_cleanup_history': cleanup_history,
            'total_active_schedules': len(active_schedules)
        }

        return make_response(jsonify(response_data), 200)

    except Exception as e:
        current_app.logger.error(f'Error fetching cleanup schedules: {e}')
        return make_response(jsonify({"error": "Failed to fetch cleanup schedules"}), 500)


@admin.route('/data-cleanup', methods=['POST'])
def schedule_cleanup():
    """
    Schedule a new data cleanup job.

    Expected JSON Body:
        {
            "cleanup_type": "string" (required),
            "frequency": "string" (daily, weekly, monthly) (required),
            "retention_days": int (required),
            "next_run": "datetime string",
            "created_by": "string" (required)
        }

    User Stories: [Mike-2.6]
    """
    try:
        current_app.logger.info('POST /system/data-cleanup - Scheduling new cleanup job')

        cleanup_data = request.get_json(silent=True)

        # Guard against a missing/invalid JSON body (would otherwise 500).
        if not cleanup_data:
            return make_response(jsonify({"error": "Request body must be JSON"}), 400)

        # Validate required fields
        required_fields = ['cleanup_type', 'frequency', 'retention_days', 'created_by']
        for field in required_fields:
            if field not in cleanup_data:
                return make_response(jsonify({"error": f"Missing required field: {field}"}), 400)

        # Validate frequency
        valid_frequencies = ['daily', 'weekly', 'monthly']
        if cleanup_data['frequency'] not in valid_frequencies:
            return make_response(jsonify({
                "error": f"Invalid frequency. Must be one of: {valid_frequencies}"
            }), 400)

        cursor = db.get_db().cursor()

        # Default next_run from the frequency if the caller didn't provide one
        # (monthly is approximated as 30 days).
        if 'next_run' not in cleanup_data:
            if cleanup_data['frequency'] == 'daily':
                next_run = datetime.now() + timedelta(days=1)
            elif cleanup_data['frequency'] == 'weekly':
                next_run = datetime.now() + timedelta(weeks=1)
            else:  # monthly
                next_run = datetime.now() + timedelta(days=30)
            cleanup_data['next_run'] = next_run.isoformat()

        query = '''
            INSERT INTO CleanupSchedule (
                cleanup_type, frequency, next_run, retention_days,
                is_active, created_by, created_at
            ) VALUES (%s, %s, %s, %s, 1, %s, NOW())
        '''

        values = (
            cleanup_data['cleanup_type'],
            cleanup_data['frequency'],
            cleanup_data['next_run'],
            cleanup_data['retention_days'],
            cleanup_data['created_by']
        )

        cursor.execute(query, values)
        db.get_db().commit()

        new_schedule_id = cursor.lastrowid

        return make_response(jsonify({
            "message": "Cleanup scheduled successfully",
            "schedule_id": new_schedule_id,
            "cleanup_type": cleanup_data['cleanup_type'],
            "frequency": cleanup_data['frequency']
        }), 201)

    except Exception as e:
        current_app.logger.error(f'Error scheduling cleanup: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to schedule cleanup"}), 500)


@admin.route('/data-validation', methods=['GET'])
def get_validation_reports():
    """
    Get data validation reports and integrity check results.

    Query Parameters:
        days: Number of days to look back (default: 7)
        status: Filter by validation status (passed, failed, warning)

    User Stories: [Mike-2.4]
    """
    try:
        current_app.logger.info('GET /system/data-validation - Fetching validation reports')

        days = request.args.get('days', 7, type=int)
        status = request.args.get('status')

        cursor = db.get_db().cursor()

        query = '''
            SELECT
                validation_id,
                validation_type,
                table_name,
                status,
                total_records,
                valid_records,
                invalid_records,
                validation_rules,
                error_details,
                run_date,
                run_by
            FROM ValidationReports
            WHERE run_date >= DATE_SUB(NOW(), INTERVAL %s DAY)
        '''

        params = [days]

        if status:
            query += ' AND status = %s'
            params.append(status)

        query += ' ORDER BY run_date DESC'

        cursor.execute(query, params)
        validation_reports = cursor.fetchall()

        response_data = {
            'validation_reports': validation_reports,
            'total_reports': len(validation_reports),
            'analysis_period_days': days
        }

        return make_response(jsonify(response_data), 200)

    except Exception as e:
        current_app.logger.error(f'Error fetching validation reports: {e}')
        return make_response(jsonify({"error": "Failed to fetch validation reports"}), 500)


@admin.route('/data-validation', methods=['POST'])
def run_validation_check():
    """
    Run a comprehensive data validation check on specified tables.

    Expected JSON Body:
        {
            "validation_type": "string" (required),
            "table_name": "string" (required),
            "validation_rules": "object",
            "run_by": "string" (required)
        }

    User Stories: [Mike-2.4]
    """
    try:
        current_app.logger.info('POST /system/data-validation - Running validation check')

        validation_data = request.get_json(silent=True)

        # Guard against a missing/invalid JSON body (would otherwise 500).
        if not validation_data:
            return make_response(jsonify({"error": "Request body must be JSON"}), 400)

        # Validate required fields
        required_fields = ['validation_type', 'table_name', 'run_by']
        for field in required_fields:
            if field not in validation_data:
                return make_response(jsonify({"error": f"Missing required field: {field}"}), 400)

        table_name = validation_data['table_name']
        validation_type = validation_data['validation_type']

        # SECURITY FIX: table_name comes straight from the request body and is
        # interpolated into SQL below; reject anything outside the whitelist
        # so a caller can never inject arbitrary SQL via the table name.
        if table_name not in _VALIDATABLE_TABLES:
            return make_response(jsonify({
                "error": f"Unknown or unsupported table: {table_name}"
            }), 400)

        cursor = db.get_db().cursor()

        # Execute table-specific validation logic.
        if table_name == 'Players':
            cursor.execute('''
                SELECT
                    COUNT(*) as total,
                    SUM(CASE WHEN first_name IS NULL OR last_name IS NULL THEN 1 ELSE 0 END) as invalid_names,
                    SUM(CASE WHEN age <= 0 OR age > 50 THEN 1 ELSE 0 END) as invalid_ages,
                    SUM(CASE WHEN current_salary < 0 THEN 1 ELSE 0 END) as invalid_salaries
                FROM Players
            ''')

            result = cursor.fetchone()
            total_records = result['total']
            invalid_records = result['invalid_names'] + result['invalid_ages'] + result['invalid_salaries']

        elif table_name == 'Teams':
            cursor.execute('''
                SELECT
                    COUNT(*) as total,
                    SUM(CASE WHEN name IS NULL OR city IS NULL THEN 1 ELSE 0 END) as invalid_basic,
                    SUM(CASE WHEN conference NOT IN ('Eastern', 'Western') THEN 1 ELSE 0 END) as invalid_conference
                FROM Teams
            ''')

            result = cursor.fetchone()
            total_records = result['total']
            invalid_records = result['invalid_basic'] + result['invalid_conference']

        elif table_name == 'PlayerGameStats':
            cursor.execute('''
                SELECT
                    COUNT(*) as total,
                    SUM(CASE WHEN points < 0 OR rebounds < 0 OR assists < 0 THEN 1 ELSE 0 END) as negative_stats,
                    SUM(CASE WHEN shooting_percentage > 1.0 OR shooting_percentage < 0 THEN 1 ELSE 0 END) as invalid_percentages,
                    SUM(CASE WHEN minutes_played > 48 OR minutes_played < 0 THEN 1 ELSE 0 END) as invalid_minutes
                FROM PlayerGameStats
            ''')

            result = cursor.fetchone()
            total_records = result['total']
            invalid_records = result['negative_stats'] + result['invalid_percentages'] + result['invalid_minutes']

        else:
            # Generic validation: row count only. table_name is safe to
            # interpolate here because it was whitelisted above.
            cursor.execute(f'SELECT COUNT(*) as total FROM {table_name}')
            result = cursor.fetchone()
            total_records = result['total']
            invalid_records = 0

        valid_records = total_records - invalid_records

        # Status thresholds: clean -> passed, <5% invalid -> warning, else failed.
        if invalid_records == 0:
            validation_status = 'passed'
        elif invalid_records < total_records * 0.05:
            validation_status = 'warning'
        else:
            validation_status = 'failed'

        # Persist the validation report.
        query = '''
            INSERT INTO ValidationReports (
                validation_type, table_name, status, total_records,
                valid_records, invalid_records, validation_rules,
                run_date, run_by
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), %s)
        '''

        values = (
            validation_type,
            table_name,
            validation_status,
            total_records,
            valid_records,
            invalid_records,
            json.dumps(validation_data.get('validation_rules', {})),
            validation_data['run_by']
        )

        cursor.execute(query, values)
        db.get_db().commit()

        new_validation_id = cursor.lastrowid

        return make_response(jsonify({
            "message": "Validation check completed",
            "validation_id": new_validation_id,
            "results": {
                "status": validation_status,
                "total_records": total_records,
                "valid_records": valid_records,
                "invalid_records": invalid_records,
                "validity_percentage": round((valid_records / total_records) * 100, 2) if total_records > 0 else 0
            }
        }), 201)

    except Exception as e:
        current_app.logger.error(f'Error running validation check: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to run validation check"}), 500)
============================================================================ +# BULK DATA OPERATIONS ROUTES +# ============================================================================ + +@admin.route('/bulk-operations/cleanup', methods=['POST']) +def execute_bulk_cleanup(): + """ + Execute bulk cleanup operations for data maintenance. + + Expected JSON Body: + { + "operation_type": "string" (required), + "target_tables": ["string"] (required), + "retention_days": int (required), + "dry_run": boolean (default: true), + "executed_by": "string" (required) + } + + User Stories: [Mike-2.6] + """ + try: + current_app.logger.info('POST /system/bulk-operations/cleanup - Executing bulk cleanup') + + cleanup_data = request.get_json() + + # Validate required fields + required_fields = ['operation_type', 'target_tables', 'retention_days', 'executed_by'] + for field in required_fields: + if field not in cleanup_data: + return make_response(jsonify({"error": f"Missing required field: {field}"}), 400) + + dry_run = cleanup_data.get('dry_run', True) + retention_days = cleanup_data['retention_days'] + + cursor = db.get_db().cursor() + + cleanup_results = [] + total_records_affected = 0 + + # Process each target table + for table in cleanup_data['target_tables']: + if table in ['ErrorLogs', 'DataLoads', 'ValidationReports']: + # Count records that would be affected + count_query = f''' + SELECT COUNT(*) as count + FROM {table} + WHERE created_at < DATE_SUB(NOW(), INTERVAL %s DAY) + ''' + + cursor.execute(count_query, (retention_days,)) + count_result = cursor.fetchone() + records_to_delete = count_result['count'] if count_result else 0 + + if not dry_run and records_to_delete > 0: + # Execute actual deletion + delete_query = f''' + DELETE FROM {table} + WHERE created_at < DATE_SUB(NOW(), INTERVAL %s DAY) + ''' + cursor.execute(delete_query, (retention_days,)) + + cleanup_results.append({ + 'table': table, + 'records_affected': records_to_delete, + 'action': 'would_delete' if 
dry_run else 'deleted' + }) + + total_records_affected += records_to_delete + + if not dry_run: + db.get_db().commit() + + return make_response(jsonify({ + "message": "Bulk cleanup operation completed", + "dry_run": dry_run, + "total_records_affected": total_records_affected, + "cleanup_results": cleanup_results, + "retention_policy_days": retention_days + }), 200) + + except Exception as e: + current_app.logger.error(f'Error executing bulk cleanup: {e}') + if not cleanup_data.get('dry_run', True): + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to execute bulk cleanup"}), 500) \ No newline at end of file diff --git a/api/backend/analytics/analytics_routes.py b/api/backend/analytics/analytics_routes.py new file mode 100644 index 0000000000..bed04f3af7 --- /dev/null +++ b/api/backend/analytics/analytics_routes.py @@ -0,0 +1,483 @@ +######################################################## +# Analytics Blueprint +# Handles performance analytics, player matchups, and statistical comparisons +# for the BallWatch basketball analytics platform +######################################################## + +from flask import Blueprint, request, jsonify, make_response, current_app +from backend.db_connection import db + +#------------------------------------------------------------ +# Create a new Blueprint object for analytics-related routes +analytics = Blueprint('analytics', __name__) + + +#------------------------------------------------------------ +# Get player matchup analysis [Marcus-3.2] +@analytics.route('/player-matchups', methods=['GET']) +def get_player_matchups(): + """ + Get comprehensive matchup analysis between two players. 
+ + Query parameters: + - player1_id: first player ID (required) + - player2_id: second player ID (required) + - season: optional season filter + + Returns: + JSON: Head-to-head matchup data and performance comparison + """ + try: + current_app.logger.info('GET /player-matchups handler started') + + # Extract and validate parameters + player1_id = request.args.get('player1_id', type=int) + player2_id = request.args.get('player2_id', type=int) + season = request.args.get('season') + + if not player1_id or not player2_id: + return make_response(jsonify({ + "error": "Both player1_id and player2_id are required" + }), 400) + + cursor = db.get_db().cursor() + + # Get head-to-head games where both players participated + matchup_query = ''' + SELECT + g.game_id, + g.game_date, + g.home_team_id, + g.away_team_id, + ht.name AS home_team, + at.name AS away_team, + pgs1.player_id AS player1_id, + p1.first_name AS player1_first_name, + p1.last_name AS player1_last_name, + pgs1.points AS player1_points, + pgs1.rebounds AS player1_rebounds, + pgs1.assists AS player1_assists, + pgs1.minutes_played AS player1_minutes, + pgs2.player_id AS player2_id, + p2.first_name AS player2_first_name, + p2.last_name AS player2_last_name, + pgs2.points AS player2_points, + pgs2.rebounds AS player2_rebounds, + pgs2.assists AS player2_assists, + pgs2.minutes_played AS player2_minutes + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + JOIN PlayerGameStats pgs1 ON g.game_id = pgs1.game_id AND pgs1.player_id = %s + JOIN PlayerGameStats pgs2 ON g.game_id = pgs2.game_id AND pgs2.player_id = %s + JOIN Players p1 ON pgs1.player_id = p1.player_id + JOIN Players p2 ON pgs2.player_id = p2.player_id + WHERE 1=1 + ''' + + params = [player1_id, player2_id] + + # Apply season filter if provided + if season: + matchup_query += ' AND g.season = %s' + params.append(season) + + matchup_query += ' ORDER BY g.game_date DESC' + + cursor.execute(matchup_query, 
params) + matchup_games = cursor.fetchall() + + # Calculate aggregated comparison statistics + if matchup_games: + player1_avg_points = sum(g['player1_points'] for g in matchup_games) / len(matchup_games) + player2_avg_points = sum(g['player2_points'] for g in matchup_games) / len(matchup_games) + player1_wins = sum(1 for g in matchup_games if g['player1_points'] > g['player2_points']) + player2_wins = len(matchup_games) - player1_wins + else: + player1_avg_points = player2_avg_points = 0 + player1_wins = player2_wins = 0 + + response_data = { + 'matchup_games': matchup_games, + 'total_matchups': len(matchup_games), + 'summary': { + 'player1': { + 'id': player1_id, + 'avg_points': round(player1_avg_points, 1), + 'head_to_head_wins': player1_wins + }, + 'player2': { + 'id': player2_id, + 'avg_points': round(player2_avg_points, 1), + 'head_to_head_wins': player2_wins + } + } + } + + current_app.logger.info(f'Successfully analyzed matchup between players {player1_id} and {player2_id}') + response = make_response(jsonify(response_data)) + response.status_code = 200 + return response + + except Exception as e: + current_app.logger.error(f'Error in get_player_matchups: {str(e)}') + return make_response(jsonify({"error": "Failed to fetch player matchups"}), 500) + + +#------------------------------------------------------------ +# Get opponent analysis and scouting report [Marcus-3.1] +@analytics.route('/opponent-reports', methods=['GET']) +def get_opponent_reports(): + """ + Get comprehensive opponent team analysis and scouting information. 
+ + Query parameters: + - team_id: your team ID (required) + - opponent_id: opponent team ID (required) + - last_n_games: number of recent games to analyze (default: 10) + + Returns: + JSON: Complete opponent analysis with key players and performance trends + """ + try: + current_app.logger.info('GET /opponent-reports handler started') + + # Extract and validate parameters + team_id = request.args.get('team_id', type=int) + opponent_id = request.args.get('opponent_id', type=int) + last_n_games = request.args.get('last_n_games', 10, type=int) + + if not team_id or not opponent_id: + return make_response(jsonify({ + "error": "Both team_id and opponent_id are required" + }), 400) + + cursor = db.get_db().cursor() + + # Get opponent team information + opponent_info_query = ''' + SELECT + t.*, + COUNT(DISTINCT tp.player_id) AS roster_size, + ROUND(AVG(p.age), 1) AS avg_age + FROM Teams t + LEFT JOIN TeamsPlayers tp ON t.team_id = tp.team_id AND tp.left_date IS NULL + LEFT JOIN Players p ON tp.player_id = p.player_id + WHERE t.team_id = %s + GROUP BY t.team_id + ''' + + cursor.execute(opponent_info_query, (opponent_id,)) + opponent_info = cursor.fetchone() + + if not opponent_info: + return make_response(jsonify({"error": "Opponent team not found"}), 404) + + # Get recent head-to-head history + head_to_head_query = ''' + SELECT + g.game_id, + g.game_date, + g.home_team_id, + g.away_team_id, + g.home_score, + g.away_score, + CASE + WHEN (g.home_team_id = %s AND g.home_score > g.away_score) OR + (g.away_team_id = %s AND g.away_score > g.home_score) THEN 'W' + ELSE 'L' + END AS your_team_result + FROM Game g + WHERE ((g.home_team_id = %s AND g.away_team_id = %s) OR + (g.home_team_id = %s AND g.away_team_id = %s)) + AND g.status = 'completed' + ORDER BY g.game_date DESC + LIMIT %s + ''' + + cursor.execute(head_to_head_query, + (team_id, team_id, team_id, opponent_id, opponent_id, team_id, last_n_games)) + head_to_head = cursor.fetchall() + + # Get opponent's recent 
performance + recent_performance_query = ''' + SELECT + g.game_id, + g.game_date, + CASE + WHEN g.home_team_id = %s THEN g.home_score + ELSE g.away_score + END AS opponent_score, + CASE + WHEN g.home_team_id = %s THEN g.away_score + ELSE g.home_score + END AS other_team_score, + CASE + WHEN g.home_team_id = %s THEN at.name + ELSE ht.name + END AS vs_team + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE (g.home_team_id = %s OR g.away_team_id = %s) + AND g.status = 'completed' + ORDER BY g.game_date DESC + LIMIT %s + ''' + + cursor.execute(recent_performance_query, + (opponent_id, opponent_id, opponent_id, opponent_id, opponent_id, last_n_games)) + recent_games = cursor.fetchall() + + # Get opponent's key players + key_players_query = ''' + SELECT + p.player_id, + p.first_name, + p.last_name, + p.position, + COUNT(pgs.game_id) AS games_played, + ROUND(AVG(pgs.points), 1) AS avg_points, + ROUND(AVG(pgs.rebounds), 1) AS avg_rebounds, + ROUND(AVG(pgs.assists), 1) AS avg_assists + FROM Players p + JOIN TeamsPlayers tp ON p.player_id = tp.player_id + LEFT JOIN PlayerGameStats pgs ON p.player_id = pgs.player_id + WHERE tp.team_id = %s AND tp.left_date IS NULL + GROUP BY p.player_id, p.first_name, p.last_name, p.position + HAVING games_played > 0 + ORDER BY avg_points DESC + LIMIT 5 + ''' + + cursor.execute(key_players_query, (opponent_id,)) + key_players = cursor.fetchall() + + # Calculate performance statistics + if recent_games: + avg_points_scored = sum(g['opponent_score'] for g in recent_games) / len(recent_games) + avg_points_allowed = sum(g['other_team_score'] for g in recent_games) / len(recent_games) + wins = sum(1 for g in recent_games if g['opponent_score'] > g['other_team_score']) + win_percentage = (wins / len(recent_games)) * 100 + else: + avg_points_scored = avg_points_allowed = win_percentage = 0 + + response_data = { + 'opponent_info': opponent_info, + 'head_to_head_history': head_to_head, + 
'recent_performance': { + 'games': recent_games, + 'avg_points_scored': round(avg_points_scored, 1), + 'avg_points_allowed': round(avg_points_allowed, 1), + 'win_percentage': round(win_percentage, 1), + 'last_n_games': len(recent_games) + }, + 'key_players': key_players + } + + current_app.logger.info(f'Successfully generated opponent report for team {opponent_id}') + response = make_response(jsonify(response_data)) + response.status_code = 200 + return response + + except Exception as e: + current_app.logger.error(f'Error in get_opponent_reports: {str(e)}') + return make_response(jsonify({"error": "Failed to fetch opponent report"}), 500) + + +#------------------------------------------------------------ +# Get lineup effectiveness analysis [Marcus-3.4] +@analytics.route('/lineup-configurations', methods=['GET']) +def get_lineup_configurations(): + """ + Get lineup effectiveness analysis for strategic decision making. + + Query parameters: + - team_id: team ID (required) + - min_games: minimum games played together (default: 5) + - season: optional season filter + + Returns: + JSON: Lineup effectiveness data with performance metrics + """ + try: + current_app.logger.info('GET /lineup-configurations handler started') + + # Extract and validate parameters + team_id = request.args.get('team_id', type=int) + min_games = request.args.get('min_games', 5, type=int) + season = request.args.get('season') + + if not team_id: + return make_response(jsonify({"error": "team_id is required"}), 400) + + cursor = db.get_db().cursor() + + # Get lineup configurations and their effectiveness + lineup_query = ''' + SELECT + lc.lineup_id, + GROUP_CONCAT(CONCAT(p.first_name, ' ', p.last_name) ORDER BY pl.position_in_lineup) AS lineup, + lc.plus_minus, + lc.offensive_rating, + lc.defensive_rating, + COUNT(DISTINCT lc.quarter) AS quarters_played + FROM LineupConfiguration lc + JOIN PlayerLineups pl ON lc.lineup_id = pl.lineup_id + JOIN Players p ON p.player_id = pl.player_id + WHERE 
lc.team_id = %s + GROUP BY lc.lineup_id, lc.plus_minus, lc.offensive_rating, lc.defensive_rating + ORDER BY lc.plus_minus DESC + LIMIT 10 + ''' + + cursor.execute(lineup_query, [team_id]) + lineup_stats = cursor.fetchall() + + response_data = { + 'team_id': team_id, + 'lineup_effectiveness': lineup_stats, + 'filters': { + 'min_games': min_games, + 'season': season + } + } + + current_app.logger.info(f'Successfully retrieved lineup configurations for team {team_id}') + response = make_response(jsonify(response_data)) + response.status_code = 200 + return response + + except Exception as e: + current_app.logger.error(f'Error in get_lineup_configurations: {str(e)}') + return make_response(jsonify({"error": "Failed to fetch lineup configurations"}), 500) + + +#------------------------------------------------------------ +# Get season performance summaries [Marcus-3.6] +@analytics.route('/season-summaries', methods=['GET']) +def get_season_summaries(): + """ + Get comprehensive season performance summaries for teams or players. 
+ + Query parameters: + - entity_type: 'team' or 'player' (required) + - entity_id: team_id or player_id (required) + - season: specific season (optional, defaults to current) + + Returns: + JSON: Season summary with key performance indicators + """ + try: + current_app.logger.info('GET /season-summaries handler started') + + # Extract and validate parameters + entity_type = request.args.get('entity_type') + entity_id = request.args.get('entity_id', type=int) + season = request.args.get('season') + + if not entity_type or not entity_id: + return make_response(jsonify({ + "error": "entity_type and entity_id are required" + }), 400) + + if entity_type not in ['team', 'player']: + return make_response(jsonify({ + "error": "entity_type must be 'team' or 'player'" + }), 400) + + cursor = db.get_db().cursor() + + if entity_type == 'team': + # Get comprehensive team season summary + team_summary_query = ''' + SELECT + t.name AS team_name, + COUNT(DISTINCT g.game_id) AS games_played, + SUM(CASE + WHEN (g.home_team_id = %s AND g.home_score > g.away_score) OR + (g.away_team_id = %s AND g.away_score > g.home_score) THEN 1 + ELSE 0 + END) AS wins, + SUM(CASE + WHEN (g.home_team_id = %s AND g.home_score < g.away_score) OR + (g.away_team_id = %s AND g.away_score < g.home_score) THEN 1 + ELSE 0 + END) AS losses, + ROUND(AVG(CASE + WHEN g.home_team_id = %s THEN g.home_score + ELSE g.away_score + END), 1) AS avg_points_scored, + ROUND(AVG(CASE + WHEN g.home_team_id = %s THEN g.away_score + ELSE g.home_score + END), 1) AS avg_points_allowed + FROM Teams t + JOIN Game g ON (g.home_team_id = t.team_id OR g.away_team_id = t.team_id) + WHERE t.team_id = %s AND g.status = 'completed' + ''' + + params = [entity_id] * 7 + + if season: + team_summary_query += ' AND g.season = %s' + params.append(season) + + team_summary_query += ' GROUP BY t.name' + + cursor.execute(team_summary_query, params) + summary = cursor.fetchone() + + else: # entity_type == 'player' + # Get comprehensive player 
season summary + player_summary_query = ''' + SELECT + p.first_name, + p.last_name, + p.position, + t.name AS team_name, + COUNT(pgs.game_id) AS games_played, + ROUND(AVG(pgs.points), 1) AS avg_points, + ROUND(AVG(pgs.rebounds), 1) AS avg_rebounds, + ROUND(AVG(pgs.assists), 1) AS avg_assists, + ROUND(AVG(pgs.steals), 1) AS avg_steals, + ROUND(AVG(pgs.blocks), 1) AS avg_blocks, + ROUND(AVG(pgs.plus_minus), 1) AS avg_plus_minus, + ROUND(AVG(pgs.minutes_played), 1) AS avg_minutes, + SUM(pgs.points) AS total_points, + MAX(pgs.points) AS season_high, + MIN(pgs.points) AS season_low + FROM Players p + LEFT JOIN PlayerGameStats pgs ON p.player_id = pgs.player_id + LEFT JOIN TeamsPlayers tp ON p.player_id = tp.player_id AND tp.left_date IS NULL + LEFT JOIN Teams t ON tp.team_id = t.team_id + LEFT JOIN Game g ON pgs.game_id = g.game_id + WHERE p.player_id = %s + ''' + + params = [entity_id] + + if season: + player_summary_query += ' AND g.season = %s' + params.append(season) + + player_summary_query += ' GROUP BY p.player_id, p.first_name, p.last_name, p.position, t.name' + + cursor.execute(player_summary_query, params) + summary = cursor.fetchone() + + response_data = { + 'entity_type': entity_type, + 'entity_id': entity_id, + 'season': season if season else 'current', + 'summary': summary + } + + current_app.logger.info(f'Successfully generated season summary for {entity_type} {entity_id}') + response = make_response(jsonify(response_data)) + response.status_code = 200 + return response + + except Exception as e: + current_app.logger.error(f'Error in get_season_summaries: {str(e)}') + return make_response(jsonify({"error": "Failed to fetch season summary"}), 500) \ No newline at end of file diff --git a/api/backend/core/basketball_routes.py b/api/backend/core/basketball_routes.py new file mode 100644 index 0000000000..d9195c22cf --- /dev/null +++ b/api/backend/core/basketball_routes.py @@ -0,0 +1,1594 @@ +""" +BallWatch Basketball Analytics Platform 
+======================================= +Core Basketball Operations Blueprint + +Consolidated routes for Players, Teams, and Games management. +Serves the core basketball functionality for superfans, coaches, and GMs. + +Author: StatPadders Team +Course: CS 3200 - Summer 2 2025 +""" + +from flask import Blueprint, request, jsonify, make_response, current_app +from datetime import datetime, timedelta +from backend.db_connection import db + +# Create the Basketball Blueprint +basketball = Blueprint('basketball', __name__) + + +# ============================================================================ +# PLAYER MANAGEMENT ROUTES +# ============================================================================ + +@basketball.route('/players', methods=['GET']) +def get_players(): + """ + Get all players with optional filters. + + Query Parameters: + position: Filter by position (PG, SG, SF, PF, C) + min_age: Minimum age filter + max_age: Maximum age filter + team_id: Filter by team ID + min_salary: Minimum salary filter + max_salary: Maximum salary filter + + User Stories: [Johnny-1.2, Johnny-1.3, Andre-4.1, Andre-4.4] + """ + try: + current_app.logger.info('GET /basketball/players - Fetching players with filters') + + # Extract query parameters + position = request.args.get('position') + min_age = request.args.get('min_age', type=int) + max_age = request.args.get('max_age', type=int) + team_id = request.args.get('team_id', type=int) + min_salary = request.args.get('min_salary', type=float) + max_salary = request.args.get('max_salary', type=float) + + cursor = db.get_db().cursor() + + # Build dynamic query with filters + query = ''' + SELECT + p.player_id, + p.first_name, + p.last_name, + p.position, + p.age, + p.years_exp, + p.college, + p.current_salary, + p.expected_salary, + p.height, + p.weight, + t.name AS current_team, + t.team_id + FROM Players p + LEFT JOIN TeamsPlayers tp ON p.player_id = tp.player_id AND tp.left_date IS NULL + LEFT JOIN Teams t ON 
tp.team_id = t.team_id + WHERE 1=1 + ''' + + params = [] + + # Apply filters dynamically + if position: + query += ' AND p.position = %s' + params.append(position) + if min_age is not None: + query += ' AND p.age >= %s' + params.append(min_age) + if max_age is not None: + query += ' AND p.age <= %s' + params.append(max_age) + if team_id: + query += ' AND t.team_id = %s' + params.append(team_id) + if min_salary is not None: + query += ' AND p.current_salary >= %s' + params.append(min_salary) + if max_salary is not None: + query += ' AND p.current_salary <= %s' + params.append(max_salary) + + query += ' ORDER BY p.last_name, p.first_name' + + cursor.execute(query, params) + players_data = cursor.fetchall() + + return make_response(jsonify({ + 'players': players_data, + 'total_count': len(players_data), + 'filters_applied': { + 'position': position, + 'age_range': f"{min_age}-{max_age}" if min_age or max_age else None, + 'team_id': team_id, + 'salary_range': f"${min_salary}-${max_salary}" if min_salary or max_salary else None + } + }), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching players: {e}') + return make_response(jsonify({"error": "Failed to fetch players"}), 500) + + +@basketball.route('/players', methods=['POST']) +def add_player(): + """ + Add a new player profile to the system. 
+ + Expected JSON Body: + { + "first_name": "string" (required), + "last_name": "string" (required), + "position": "string" (required), + "age": int (required), + "years_exp": int, + "college": "string", + "current_salary": float, + "expected_salary": float, + "height": "string", + "weight": int + } + + User Stories: [Mike-2.2] + """ + try: + current_app.logger.info('POST /basketball/players - Adding new player') + + player_data = request.get_json() + + # Validate required fields + required_fields = ['first_name', 'last_name', 'position', 'age'] + for field in required_fields: + if field not in player_data: + return make_response(jsonify({"error": f"Missing required field: {field}"}), 400) + + cursor = db.get_db().cursor() + + # Insert new player + query = ''' + INSERT INTO Players ( + first_name, last_name, position, age, years_exp, + college, current_salary, expected_salary, height, weight + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + ''' + + values = ( + player_data['first_name'], + player_data['last_name'], + player_data['position'], + player_data['age'], + player_data.get('years_exp', 0), + player_data.get('college'), + player_data.get('current_salary', 0), + player_data.get('expected_salary', 0), + player_data.get('height'), + player_data.get('weight') + ) + + cursor.execute(query, values) + db.get_db().commit() + + new_player_id = cursor.lastrowid + + return make_response(jsonify({ + "message": "Player added successfully", + "player_id": new_player_id, + "player_name": f"{player_data['first_name']} {player_data['last_name']}" + }), 201) + + except Exception as e: + current_app.logger.error(f'Error adding player: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to add player"}), 500) + + +@basketball.route('/players/', methods=['PUT']) +def update_player(player_id): + """ + Update player information. 
+ + Expected JSON Body (all fields optional): + { + "position": "string", + "age": int, + "years_exp": int, + "current_salary": float, + "expected_salary": float, + "team_id": int, + "height": "string", + "weight": int + } + + User Stories: [Mike-2.1] + """ + try: + current_app.logger.info(f'PUT /basketball/players/{player_id} - Updating player') + + player_data = request.get_json() + + if not player_data: + return make_response(jsonify({"error": "No data provided for update"}), 400) + + cursor = db.get_db().cursor() + + # Check if player exists + cursor.execute('SELECT player_id FROM Players WHERE player_id = %s', (player_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Player not found"}), 404) + + # Build dynamic update query for Players table + update_fields = [] + values = [] + + player_fields = ['position', 'age', 'years_exp', 'current_salary', + 'expected_salary', 'height', 'weight'] + + for field in player_fields: + if field in player_data: + update_fields.append(f'{field} = %s') + values.append(player_data[field]) + + if update_fields: + query = f"UPDATE Players SET {', '.join(update_fields)} WHERE player_id = %s" + values.append(player_id) + cursor.execute(query, values) + + # Handle team assignment separately + if 'team_id' in player_data: + new_team_id = player_data['team_id'] + + # End current team association + cursor.execute(''' + UPDATE TeamsPlayers + SET left_date = CURDATE() + WHERE player_id = %s AND left_date IS NULL + ''', (player_id,)) + + # Create new team association + cursor.execute(''' + INSERT INTO TeamsPlayers (team_id, player_id, joined_date) + VALUES (%s, %s, CURDATE()) + ''', (new_team_id, player_id)) + + db.get_db().commit() + + return make_response(jsonify({ + "message": "Player updated successfully", + "player_id": player_id, + "updated_fields": list(player_data.keys()) + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating player: {e}') + db.get_db().rollback() + return 
make_response(jsonify({"error": "Failed to update player"}), 500) + + +@basketball.route('/players//stats', methods=['GET']) +def get_player_stats(player_id): + """ + Get player's performance statistics. + + Query Parameters: + season: Optional season filter + game_type: Optional game type filter ('regular', 'playoff') + + User Stories: [Johnny-1.1, Johnny-1.3, Johnny-1.4, Andre-4.3] + """ + try: + current_app.logger.info(f'GET /basketball/players/{player_id}/stats - Fetching player stats') + + season = request.args.get('season') + game_type = request.args.get('game_type') + + cursor = db.get_db().cursor() + + # Get comprehensive player statistics + query = ''' + SELECT + p.player_id, + p.first_name, + p.last_name, + p.position, + COUNT(pgs.game_id) AS games_played, + ROUND(AVG(pgs.points), 1) AS avg_points, + ROUND(AVG(pgs.rebounds), 1) AS avg_rebounds, + ROUND(AVG(pgs.assists), 1) AS avg_assists, + ROUND(AVG(pgs.steals), 1) AS avg_steals, + ROUND(AVG(pgs.blocks), 1) AS avg_blocks, + ROUND(AVG(pgs.turnovers), 1) AS avg_turnovers, + ROUND(AVG(pgs.shooting_percentage), 3) AS avg_shooting_pct, + ROUND(AVG(pgs.three_point_percentage), 3) AS avg_three_point_pct, + ROUND(AVG(pgs.free_throw_percentage), 3) AS avg_free_throw_pct, + ROUND(AVG(pgs.plus_minus), 1) AS avg_plus_minus, + ROUND(AVG(pgs.minutes_played), 1) AS avg_minutes, + SUM(pgs.points) AS total_points, + SUM(pgs.rebounds) AS total_rebounds, + SUM(pgs.assists) AS total_assists, + MAX(pgs.points) AS season_high_points, + MIN(pgs.points) AS season_low_points + FROM Players p + LEFT JOIN PlayerGameStats pgs ON p.player_id = pgs.player_id + LEFT JOIN Game g ON pgs.game_id = g.game_id + WHERE p.player_id = %s + ''' + + params = [player_id] + + if season: + query += ' AND g.season = %s' + params.append(season) + if game_type: + query += ' AND g.game_type = %s' + params.append(game_type) + + query += ' GROUP BY p.player_id, p.first_name, p.last_name, p.position' + + cursor.execute(query, params) + stats_data = 
cursor.fetchone() + + if not stats_data: + return make_response(jsonify({"error": "Player not found"}), 404) + + # Get recent games performance + cursor.execute(''' + SELECT + g.game_id, + g.game_date, + g.home_team_id, + g.away_team_id, + ht.name AS home_team, + at.name AS away_team, + pgs.points, + pgs.rebounds, + pgs.assists, + pgs.minutes_played + FROM PlayerGameStats pgs + JOIN Game g ON pgs.game_id = g.game_id + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE pgs.player_id = %s + ORDER BY g.game_date DESC + LIMIT 10 + ''', (player_id,)) + + recent_games = cursor.fetchall() + + response_data = { + 'player_stats': stats_data, + 'recent_games': recent_games, + 'filters': { + 'season': season, + 'game_type': game_type + } + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching player stats: {e}') + return make_response(jsonify({"error": "Failed to fetch player stats"}), 500) + + +@basketball.route('/players//stats', methods=['PUT']) +def update_player_stats(player_id): + """ + Update or add player statistics for a specific game. 
+ + Expected JSON Body: + { + "game_id": int (required), + "points": int, + "rebounds": int, + "assists": int, + "steals": int, + "blocks": int, + "turnovers": int, + "shooting_percentage": float, + "three_point_percentage": float, + "free_throw_percentage": float, + "plus_minus": int, + "minutes_played": int + } + + User Stories: [Mike-2.1] + """ + try: + current_app.logger.info(f'PUT /basketball/players/{player_id}/stats - Updating stats') + + stats_data = request.get_json() + + if 'game_id' not in stats_data: + return make_response(jsonify({"error": "game_id is required"}), 400) + + cursor = db.get_db().cursor() + + # Check if stats already exist for this player and game + cursor.execute(''' + SELECT * FROM PlayerGameStats + WHERE player_id = %s AND game_id = %s + ''', (player_id, stats_data['game_id'])) + + existing_stats = cursor.fetchone() + + if existing_stats: + # Update existing statistics + update_fields = [] + values = [] + + stat_fields = ['points', 'rebounds', 'assists', 'steals', 'blocks', + 'turnovers', 'shooting_percentage', 'three_point_percentage', + 'free_throw_percentage', 'plus_minus', 'minutes_played'] + + for field in stat_fields: + if field in stats_data: + update_fields.append(f'{field} = %s') + values.append(stats_data[field]) + + if update_fields: + query = f''' + UPDATE PlayerGameStats + SET {', '.join(update_fields)} + WHERE player_id = %s AND game_id = %s + ''' + values.extend([player_id, stats_data['game_id']]) + cursor.execute(query, values) + else: + # Insert new statistics + query = ''' + INSERT INTO PlayerGameStats ( + player_id, game_id, points, rebounds, assists, steals, blocks, + turnovers, shooting_percentage, three_point_percentage, + free_throw_percentage, plus_minus, minutes_played + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + ''' + + values = ( + player_id, + stats_data['game_id'], + stats_data.get('points', 0), + stats_data.get('rebounds', 0), + stats_data.get('assists', 0), + stats_data.get('steals', 
0), + stats_data.get('blocks', 0), + stats_data.get('turnovers', 0), + stats_data.get('shooting_percentage', 0), + stats_data.get('three_point_percentage', 0), + stats_data.get('free_throw_percentage', 0), + stats_data.get('plus_minus', 0), + stats_data.get('minutes_played', 0) + ) + + cursor.execute(query, values) + + db.get_db().commit() + + return make_response(jsonify({ + "message": "Player stats updated successfully", + "player_id": player_id, + "game_id": stats_data['game_id'] + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating player stats: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to update player stats"}), 500) + + +# ============================================================================ +# TEAM MANAGEMENT ROUTES +# ============================================================================ + +@basketball.route('/teams', methods=['GET']) +def get_teams(): + """ + Get all teams with optional filters and roster information. 
+ + Query Parameters: + conference: Filter by conference ('Eastern', 'Western') + division: Filter by division + city: Filter by city + + User Stories: [Johnny-1.2] + """ + try: + current_app.logger.info('GET /basketball/teams - Fetching teams') + + conference = request.args.get('conference') + division = request.args.get('division') + city = request.args.get('city') + + cursor = db.get_db().cursor() + + # Get teams with roster statistics + query = ''' + SELECT + t.team_id, + t.name, + t.city, + t.state, + t.arena, + t.conference, + t.division, + t.coach, + t.gm, + t.owner, + t.championships, + t.founded_year, + t.offensive_system, + t.defensive_system, + COUNT(DISTINCT tp.player_id) AS roster_size, + ROUND(AVG(p.age), 1) AS avg_player_age, + SUM(p.current_salary) AS total_salary + FROM Teams t + LEFT JOIN TeamsPlayers tp ON t.team_id = tp.team_id AND tp.left_date IS NULL + LEFT JOIN Players p ON tp.player_id = p.player_id + WHERE 1=1 + ''' + + params = [] + + if conference: + query += ' AND t.conference = %s' + params.append(conference) + if division: + query += ' AND t.division = %s' + params.append(division) + if city: + query += ' AND t.city = %s' + params.append(city) + + query += ''' + GROUP BY t.team_id, t.name, t.city, t.state, t.arena, + t.conference, t.division, t.coach, t.gm, t.owner, + t.championships, t.founded_year, t.offensive_system, t.defensive_system + ORDER BY t.conference, t.division, t.name + ''' + + cursor.execute(query, params) + teams_data = cursor.fetchall() + + return make_response(jsonify({ + 'teams': teams_data, + 'total_count': len(teams_data), + 'filters_applied': { + 'conference': conference, + 'division': division, + 'city': city + } + }), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching teams: {e}') + return make_response(jsonify({"error": "Failed to fetch teams"}), 500) + + +@basketball.route('/teams/', methods=['GET']) +def get_team_by_id(team_id): + """ + Get detailed information for a specific team. 
+ + User Stories: [Marcus-3.3, Andre-4.2] + """ + try: + current_app.logger.info(f'GET /basketball/teams/{team_id} - Fetching team details') + + cursor = db.get_db().cursor() + + # Get comprehensive team details + query = ''' + SELECT + t.*, + COUNT(DISTINCT tp.player_id) AS roster_size, + ROUND(AVG(p.age), 1) AS avg_player_age, + SUM(p.current_salary) AS total_salary, + ROUND(AVG(p.current_salary), 0) AS avg_salary + FROM Teams t + LEFT JOIN TeamsPlayers tp ON t.team_id = tp.team_id AND tp.left_date IS NULL + LEFT JOIN Players p ON tp.player_id = p.player_id + WHERE t.team_id = %s + GROUP BY t.team_id + ''' + + cursor.execute(query, (team_id,)) + team_data = cursor.fetchone() + + if not team_data: + return make_response(jsonify({"error": "Team not found"}), 404) + + # Get recent games performance + cursor.execute(''' + SELECT + g.game_id, + g.game_date, + g.home_team_id, + g.away_team_id, + ht.name AS home_team, + at.name AS away_team, + g.home_score, + g.away_score, + CASE + WHEN g.home_team_id = %s AND g.home_score > g.away_score THEN 'W' + WHEN g.away_team_id = %s AND g.away_score > g.home_score THEN 'W' + ELSE 'L' + END AS result + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE g.home_team_id = %s OR g.away_team_id = %s + ORDER BY g.game_date DESC + LIMIT 10 + ''', (team_id, team_id, team_id, team_id)) + + recent_games = cursor.fetchall() + + response_data = { + 'team_details': team_data, + 'recent_games': recent_games + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching team details: {e}') + return make_response(jsonify({"error": "Failed to fetch team"}), 500) + + +@basketball.route('/teams/', methods=['PUT']) +def update_team(team_id): + """ + Update team information. 
+ + Expected JSON Body (all fields optional): + { + "name": "string", + "city": "string", + "state": "string", + "arena": "string", + "conference": "string", + "division": "string", + "coach": "string", + "gm": "string", + "owner": "string", + "offensive_system": "string", + "defensive_system": "string" + } + + User Stories: [Mike-2.1] + """ + try: + current_app.logger.info(f'PUT /basketball/teams/{team_id} - Updating team') + + team_data = request.get_json() + + if not team_data: + return make_response(jsonify({"error": "No data provided for update"}), 400) + + cursor = db.get_db().cursor() + + # Build dynamic update query + update_fields = [] + values = [] + + allowed_fields = ['name', 'city', 'state', 'arena', 'conference', + 'division', 'coach', 'gm', 'owner', + 'offensive_system', 'defensive_system'] + + for field in allowed_fields: + if field in team_data: + update_fields.append(f'{field} = %s') + values.append(team_data[field]) + + if not update_fields: + return make_response(jsonify({"error": "No valid fields to update"}), 400) + + query = f"UPDATE Teams SET {', '.join(update_fields)} WHERE team_id = %s" + values.append(team_id) + + cursor.execute(query, values) + + if cursor.rowcount == 0: + return make_response(jsonify({"error": "Team not found"}), 404) + + db.get_db().commit() + + return make_response(jsonify({ + "message": "Team updated successfully", + "team_id": team_id, + "updated_fields": list(team_data.keys()) + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating team: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to update team"}), 500) + + +@basketball.route('/teams//players', methods=['GET']) +def get_team_players(team_id): + """ + Get current team roster with detailed player information. 
+ + Query Parameters: + position: Filter by position (PG, SG, SF, PF, C) + include_stats: If 'true', include player statistics + + User Stories: [Marcus-3.3, Andre-4.2] + """ + try: + current_app.logger.info(f'GET /basketball/teams/{team_id}/players - Fetching roster') + + position = request.args.get('position') + include_stats = request.args.get('include_stats', 'false').lower() == 'true' + + cursor = db.get_db().cursor() + + # Build query based on whether stats are requested + if include_stats: + query = ''' + SELECT DISTINCT + p.player_id, + p.first_name, + p.last_name, + p.position, + tp.jersey_num, + tp.joined_date, + p.age, + p.years_exp, + p.college, + p.height, + p.weight, + p.current_salary, + p.expected_salary, + COUNT(pgs.game_id) AS games_played, + ROUND(AVG(pgs.points), 1) AS avg_points, + ROUND(AVG(pgs.rebounds), 1) AS avg_rebounds, + ROUND(AVG(pgs.assists), 1) AS avg_assists, + ROUND(AVG(pgs.minutes_played), 1) AS avg_minutes + FROM Players p + JOIN TeamsPlayers tp ON p.player_id = tp.player_id + LEFT JOIN PlayerGameStats pgs ON p.player_id = pgs.player_id + WHERE tp.team_id = %s + AND tp.left_date IS NULL + ''' + else: + query = ''' + SELECT DISTINCT + p.player_id, + p.first_name, + p.last_name, + p.position, + tp.jersey_num, + tp.joined_date, + p.age, + p.years_exp, + p.college, + p.height, + p.weight, + p.current_salary, + p.expected_salary + FROM Players p + JOIN TeamsPlayers tp ON p.player_id = tp.player_id + WHERE tp.team_id = %s + AND tp.left_date IS NULL + ''' + + params = [team_id] + + if position: + query += ' AND p.position = %s' + params.append(position) + + if include_stats: + query += ''' + GROUP BY p.player_id, p.first_name, p.last_name, p.position, + tp.jersey_num, tp.joined_date, p.age, p.years_exp, + p.college, p.height, p.weight, p.current_salary, p.expected_salary + ''' + + query += ' ORDER BY tp.jersey_num, p.last_name' + + cursor.execute(query, params) + roster_data = cursor.fetchall() + + # Get team name for context + 
cursor.execute('SELECT name FROM Teams WHERE team_id = %s', (team_id,)) + team_info = cursor.fetchone() + + response_data = { + 'team_id': team_id, + 'team_name': team_info['name'] if team_info else None, + 'roster_size': len(roster_data), + 'players': roster_data, + 'includes_stats': include_stats, + 'position_filter': position + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching team roster: {e}') + return make_response(jsonify({"error": "Failed to fetch team roster"}), 500) + + +@basketball.route('/teams//players', methods=['POST']) +def add_team_player(team_id): + """ + Add a player to team roster. + + Expected JSON Body: + { + "player_id": int (required), + "jersey_num": int (required), + "joined_date": "YYYY-MM-DD" (required) + } + + User Stories: [Mike-2.2] + """ + try: + current_app.logger.info(f'POST /basketball/teams/{team_id}/players - Adding player to roster') + + roster_data = request.get_json() + player_id = roster_data.get('player_id') + jersey_num = roster_data.get('jersey_num') + joined_date = roster_data.get('joined_date') + + if not player_id or jersey_num is None or not joined_date: + return make_response(jsonify({ + "error": "Missing required fields: player_id, jersey_num, joined_date" + }), 400) + + cursor = db.get_db().cursor() + + # Validate player exists + cursor.execute('SELECT player_id FROM Players WHERE player_id = %s', (player_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Player not found"}), 404) + + # Check jersey number availability + cursor.execute(''' + SELECT player_id FROM TeamsPlayers + WHERE team_id = %s AND jersey_num = %s AND left_date IS NULL + ''', (team_id, jersey_num)) + if cursor.fetchone(): + return make_response(jsonify({ + "error": f"Jersey number {jersey_num} is already taken" + }), 409) + + # End any existing team association for this player + cursor.execute(''' + UPDATE TeamsPlayers + SET left_date = %s + 
WHERE player_id = %s AND left_date IS NULL + ''', (joined_date, player_id)) + + # Add player to the new team + query = ''' + INSERT INTO TeamsPlayers (team_id, player_id, jersey_num, joined_date) + VALUES (%s, %s, %s, %s) + ''' + + cursor.execute(query, (team_id, player_id, jersey_num, joined_date)) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Player added to roster successfully", + "team_id": team_id, + "player_id": player_id, + "jersey_num": jersey_num + }), 201) + + except Exception as e: + current_app.logger.error(f'Error adding player to roster: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to add player to roster"}), 500) + + +@basketball.route('/teams//players/', methods=['PUT']) +def update_team_player(team_id, player_id): + """ + Update player's status on team (jersey number, left date, etc.). + + Expected JSON Body (all fields optional): + { + "jersey_num": int, + "left_date": "YYYY-MM-DD", + "status": "active|injured|suspended" + } + + User Stories: [Mike-2.1, Johnny-1.6] + """ + try: + current_app.logger.info(f'PUT /basketball/teams/{team_id}/players/{player_id} - Updating player status') + + update_data = request.get_json() + + if not update_data: + return make_response(jsonify({"error": "No data provided for update"}), 400) + + cursor = db.get_db().cursor() + + # Verify player-team association exists + cursor.execute(''' + SELECT * FROM TeamsPlayers + WHERE team_id = %s AND player_id = %s AND left_date IS NULL + ''', (team_id, player_id)) + + if not cursor.fetchone(): + return make_response(jsonify({"error": "Player not found on this team"}), 404) + + # Build dynamic update query + update_fields = [] + values = [] + + if 'jersey_num' in update_data: + jersey_num = update_data['jersey_num'] + # Check if new jersey number is available + cursor.execute(''' + SELECT player_id FROM TeamsPlayers + WHERE team_id = %s AND jersey_num = %s AND player_id != %s AND left_date IS NULL + ''', (team_id, 
jersey_num, player_id)) + if cursor.fetchone(): + return make_response(jsonify({ + "error": f"Jersey number {jersey_num} is already taken" + }), 409) + update_fields.append('jersey_num = %s') + values.append(jersey_num) + + if 'left_date' in update_data: + update_fields.append('left_date = %s') + values.append(update_data['left_date']) + + if 'status' in update_data: + update_fields.append('status = %s') + values.append(update_data['status']) + + if not update_fields: + return make_response(jsonify({"error": "No valid fields to update"}), 400) + + query = f''' + UPDATE TeamsPlayers + SET {', '.join(update_fields)} + WHERE team_id = %s AND player_id = %s AND left_date IS NULL + ''' + + values.extend([team_id, player_id]) + cursor.execute(query, values) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Player status updated successfully", + "team_id": team_id, + "player_id": player_id, + "updated_fields": list(update_data.keys()) + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating player status: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to update player status"}), 500) + + +# ============================================================================ +# GAME MANAGEMENT ROUTES +# ============================================================================ + +@basketball.route('/games', methods=['GET']) +def get_games(): + """ + Get games list with optional filters. 
+ + Query Parameters: + team_id: Filter by team (home or away) + start_date: Filter games from this date (YYYY-MM-DD) + end_date: Filter games until this date (YYYY-MM-DD) + season: Filter by season + game_type: Filter by game type ('regular', 'playoff') + status: Filter by status ('scheduled', 'in_progress', 'completed') + + User Stories: [Johnny-1.5, Marcus-3.6] + """ + try: + current_app.logger.info('GET /basketball/games - Fetching games schedule') + + # Extract query parameters + team_id = request.args.get('team_id', type=int) + start_date = request.args.get('start_date') + end_date = request.args.get('end_date') + season = request.args.get('season') + game_type = request.args.get('game_type') + status = request.args.get('status') + + cursor = db.get_db().cursor() + + # Build comprehensive games query + query = ''' + SELECT + g.game_id, + g.game_date, + TIME_FORMAT(g.game_time, '%%H:%%i:%%s') AS game_time, + g.home_team_id, + g.away_team_id, + ht.name AS home_team_name, + ht.city AS home_team_city, + at.name AS away_team_name, + at.city AS away_team_city, + g.home_score, + g.away_score, + g.season, + g.game_type, + g.status, + g.attendance, + g.venue, + CASE + WHEN g.home_score > g.away_score THEN ht.name + WHEN g.away_score > g.home_score THEN at.name + ELSE NULL + END AS winner + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE 1=1 + ''' + + params = [] + + # Apply filters dynamically + if team_id: + query += ' AND (g.home_team_id = %s OR g.away_team_id = %s)' + params.extend([team_id, team_id]) + if start_date: + query += ' AND g.game_date >= %s' + params.append(start_date) + if end_date: + query += ' AND g.game_date <= %s' + params.append(end_date) + if season: + query += ' AND g.season = %s' + params.append(season) + if game_type: + query += ' AND g.game_type = %s' + params.append(game_type) + if status: + query += ' AND g.status = %s' + params.append(status) + + query += ' ORDER BY 
g.game_date DESC' + + cursor.execute(query, params) + games_data = cursor.fetchall() + + # Calculate summary statistics + completed_games = len([g for g in games_data if g['status'] == 'completed']) + scheduled_games = len([g for g in games_data if g['status'] == 'scheduled']) + in_progress_games = len([g for g in games_data if g['status'] == 'in_progress']) + + response_data = { + 'games': games_data, + 'summary': { + 'total_games': len(games_data), + 'completed_games': completed_games, + 'scheduled_games': scheduled_games, + 'in_progress_games': in_progress_games + }, + 'filters_applied': { + 'team_id': team_id, + 'date_range': f"{start_date} to {end_date}" if start_date or end_date else None, + 'season': season, + 'game_type': game_type, + 'status': status + } + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching games: {e}') + return make_response(jsonify({"error": "Failed to fetch games"}), 500) + + +@basketball.route('/games', methods=['POST']) +def create_game(): + """ + Create a new game. 
+ + Expected JSON Body: + { + "game_date": "YYYY-MM-DD" (required), + "game_time": "HH:MM:SS" (optional), + "home_team_id": int (required), + "away_team_id": int (required), + "season": "string" (required), + "game_type": "regular|playoff" (default: "regular"), + "status": "scheduled|in_progress|completed" (default: "scheduled"), + "venue": "string" (optional), + "attendance": int (optional) + } + + User Stories: [Mike-2.1] + """ + try: + current_app.logger.info('POST /basketball/games - Creating new game') + + game_data = request.get_json() + + # Validate required fields + required_fields = ['game_date', 'home_team_id', 'away_team_id', 'season'] + for field in required_fields: + if field not in game_data: + return make_response(jsonify({"error": f"Missing required field: {field}"}), 400) + + # Business logic validations + if game_data['home_team_id'] == game_data['away_team_id']: + return make_response(jsonify({"error": "Home and away teams must be different"}), 400) + + # Validate enums + valid_game_types = ['regular', 'playoff'] + if 'game_type' in game_data and game_data['game_type'] not in valid_game_types: + return make_response(jsonify({ + "error": f"Invalid game_type. Must be one of: {valid_game_types}" + }), 400) + + valid_statuses = ['scheduled', 'in_progress', 'completed'] + if 'status' in game_data and game_data['status'] not in valid_statuses: + return make_response(jsonify({ + "error": f"Invalid status. 
Must be one of: {valid_statuses}" + }), 400) + + cursor = db.get_db().cursor() + + # Verify both teams exist + cursor.execute('SELECT team_id FROM Teams WHERE team_id IN (%s, %s)', + (game_data['home_team_id'], game_data['away_team_id'])) + + if cursor.rowcount != 2: + return make_response(jsonify({"error": "One or both teams not found"}), 404) + + # Check for duplicate game + cursor.execute(''' + SELECT game_id FROM Game + WHERE game_date = %s + AND home_team_id = %s + AND away_team_id = %s + ''', (game_data['game_date'], game_data['home_team_id'], game_data['away_team_id'])) + + if cursor.fetchone(): + return make_response(jsonify({ + "error": "Game already exists for these teams on this date" + }), 409) + + # Insert new game + query = ''' + INSERT INTO Game ( + game_date, game_time, home_team_id, away_team_id, + home_score, away_score, season, game_type, status, venue, attendance + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + ''' + + values = ( + game_data['game_date'], + game_data.get('game_time'), + game_data['home_team_id'], + game_data['away_team_id'], + game_data.get('home_score', 0), + game_data.get('away_score', 0), + game_data['season'], + game_data.get('game_type', 'regular'), + game_data.get('status', 'scheduled'), + game_data.get('venue'), + game_data.get('attendance') + ) + + cursor.execute(query, values) + db.get_db().commit() + + new_game_id = cursor.lastrowid + + return make_response(jsonify({ + "message": "Game created successfully", + "game_id": new_game_id + }), 201) + + except Exception as e: + current_app.logger.error(f'Error creating game: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to create game"}), 500) + + +@basketball.route('/games/', methods=['GET']) +def get_game_details(game_id): + """ + Get detailed information for a specific game including player stats. 
+ + User Stories: [Johnny-1.5, Marcus-3.6] + """ + try: + current_app.logger.info(f'GET /basketball/games/{game_id} - Fetching game details') + + cursor = db.get_db().cursor() + + # Get comprehensive game details + query = ''' + SELECT + g.game_id, + g.game_date, + TIME_FORMAT(g.game_time, '%%H:%%i:%%s') AS game_time, + g.home_team_id, + g.away_team_id, + g.home_score, + g.away_score, + g.season, + g.game_type, + g.status, + g.attendance, + g.venue, + ht.name AS home_team_name, + ht.city AS home_team_city, + ht.coach AS home_team_coach, + at.name AS away_team_name, + at.city AS away_team_city, + at.coach AS away_team_coach + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE g.game_id = %s + ''' + + cursor.execute(query, (game_id,)) + game_data = cursor.fetchone() + + if not game_data: + return make_response(jsonify({"error": "Game not found"}), 404) + + # Get player statistics for this game + cursor.execute(''' + SELECT + pgs.player_id, + pgs.game_id, + pgs.points, + pgs.rebounds, + pgs.assists, + pgs.steals, + pgs.blocks, + pgs.turnovers, + pgs.shooting_percentage, + pgs.three_point_percentage, + pgs.free_throw_percentage, + pgs.plus_minus, + pgs.minutes_played, + p.first_name, + p.last_name, + p.position, + tp.team_id, + t.name AS team_name + FROM PlayerGameStats pgs + JOIN Players p ON pgs.player_id = p.player_id + JOIN TeamsPlayers tp ON p.player_id = tp.player_id AND tp.left_date IS NULL + JOIN Teams t ON tp.team_id = t.team_id + WHERE pgs.game_id = %s + ORDER BY tp.team_id, pgs.points DESC + ''', (game_id,)) + + player_stats = cursor.fetchall() + + # Separate stats by team + home_team_stats = [stat for stat in player_stats if stat['team_id'] == game_data['home_team_id']] + away_team_stats = [stat for stat in player_stats if stat['team_id'] == game_data['away_team_id']] + + response_data = { + 'game_details': game_data, + 'home_team_stats': home_team_stats, + 'away_team_stats': away_team_stats, + 
'total_players': len(player_stats) + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching game details: {e}') + return make_response(jsonify({"error": "Failed to fetch game details"}), 500) + + +@basketball.route('/games/', methods=['PUT']) +def update_game(game_id): + """ + Update game information and scores. + + Expected JSON Body (all fields optional): + { + "game_date": "YYYY-MM-DD", + "game_time": "HH:MM:SS", + "home_score": int, + "away_score": int, + "season": "string", + "game_type": "regular|playoff", + "status": "scheduled|in_progress|completed", + "venue": "string", + "attendance": int + } + + User Stories: [Mike-2.1] + """ + try: + current_app.logger.info(f'PUT /basketball/games/{game_id} - Updating game') + + game_data = request.get_json() + + if not game_data: + return make_response(jsonify({"error": "No data provided for update"}), 400) + + cursor = db.get_db().cursor() + + # Verify game exists + cursor.execute('SELECT game_id FROM Game WHERE game_id = %s', (game_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Game not found"}), 404) + + # Validate enum fields + if 'game_type' in game_data: + valid_game_types = ['regular', 'playoff'] + if game_data['game_type'] not in valid_game_types: + return make_response(jsonify({ + "error": f"Invalid game_type. Must be one of: {valid_game_types}" + }), 400) + + if 'status' in game_data: + valid_statuses = ['scheduled', 'in_progress', 'completed'] + if game_data['status'] not in valid_statuses: + return make_response(jsonify({ + "error": f"Invalid status. 
Must be one of: {valid_statuses}" + }), 400) + + # Validate score values + for score_field in ['home_score', 'away_score']: + if score_field in game_data and game_data[score_field] < 0: + return make_response(jsonify({ + "error": f"{score_field} cannot be negative" + }), 400) + + if 'attendance' in game_data and game_data['attendance'] < 0: + return make_response(jsonify({"error": "Attendance cannot be negative"}), 400) + + # Build dynamic update query + update_fields = [] + values = [] + + allowed_fields = ['game_date', 'game_time', 'home_score', 'away_score', + 'season', 'game_type', 'status', 'venue', 'attendance'] + + for field in allowed_fields: + if field in game_data: + update_fields.append(f'{field} = %s') + values.append(game_data[field]) + + if not update_fields: + return make_response(jsonify({"error": "No valid fields to update"}), 400) + + query = f"UPDATE Game SET {', '.join(update_fields)} WHERE game_id = %s" + values.append(game_id) + + cursor.execute(query, values) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Game updated successfully", + "game_id": game_id, + "updated_fields": list(game_data.keys()) + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating game: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to update game"}), 500) + + +@basketball.route('/games/upcoming', methods=['GET']) +def get_upcoming_games(): + """ + Get upcoming games for the next specified days. 
+ + Query Parameters: + days: Number of days to look ahead (default: 7) + team_id: Filter by specific team + + User Stories: [Johnny-1.5, Marcus-3.6] + """ + try: + current_app.logger.info('GET /basketball/games/upcoming - Fetching upcoming games') + + days = request.args.get('days', 7, type=int) + team_id = request.args.get('team_id', type=int) + + cursor = db.get_db().cursor() + + # Calculate date range + today = datetime.now().date() + end_date = today + timedelta(days=days) + + query = ''' + SELECT + g.game_id, + g.game_date, + TIME_FORMAT(g.game_time, '%%H:%%i:%%s') AS game_time, + g.home_team_id, + g.away_team_id, + ht.name AS home_team_name, + at.name AS away_team_name, + g.venue, + g.game_type, + g.status + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE g.game_date BETWEEN %s AND %s + AND g.status IN ('scheduled', 'in_progress') + ''' + + params = [today, end_date] + + if team_id: + query += ' AND (g.home_team_id = %s OR g.away_team_id = %s)' + params.extend([team_id, team_id]) + + query += ' ORDER BY g.game_date, g.game_time' + + cursor.execute(query, params) + upcoming_games = cursor.fetchall() + + response_data = { + 'upcoming_games': upcoming_games, + 'date_range': { + 'start': str(today), + 'end': str(end_date), + 'days_ahead': days + }, + 'total_games': len(upcoming_games) + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching upcoming games: {e}') + return make_response(jsonify({"error": "Failed to fetch upcoming games"}), 500) + + +@basketball.route('/teams//schedule', methods=['GET']) +def get_team_schedule(team_id): + """ + Get a specific team's schedule with win/loss records. 
+ + Query Parameters: + season: Optional season filter + status: Optional status filter + + User Stories: [Marcus-3.6, Johnny-1.5] + """ + try: + current_app.logger.info(f'GET /basketball/teams/{team_id}/schedule - Fetching team schedule') + + season = request.args.get('season') + status = request.args.get('status') + + cursor = db.get_db().cursor() + + # Verify team exists + cursor.execute('SELECT team_id, name FROM Teams WHERE team_id = %s', (team_id,)) + team_info = cursor.fetchone() + + if not team_info: + return make_response(jsonify({"error": "Team not found"}), 404) + + query = ''' + SELECT + g.game_id, + g.game_date, + TIME_FORMAT(g.game_time, '%%H:%%i:%%s') AS game_time, + g.home_team_id, + g.away_team_id, + CASE + WHEN g.home_team_id = %s THEN 'Home' + ELSE 'Away' + END AS home_away, + CASE + WHEN g.home_team_id = %s THEN at.name + ELSE ht.name + END AS opponent, + g.home_score, + g.away_score, + CASE + WHEN g.status = 'completed' THEN + CASE + WHEN (g.home_team_id = %s AND g.home_score > g.away_score) OR + (g.away_team_id = %s AND g.away_score > g.home_score) THEN 'W' + ELSE 'L' + END + ELSE NULL + END AS result, + g.season, + g.game_type, + g.status, + g.venue + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE (g.home_team_id = %s OR g.away_team_id = %s) + ''' + + params = [team_id, team_id, team_id, team_id, team_id, team_id] + + if season: + query += ' AND g.season = %s' + params.append(season) + if status: + query += ' AND g.status = %s' + params.append(status) + + query += ' ORDER BY g.game_date DESC' + + cursor.execute(query, params) + schedule = cursor.fetchall() + + # Calculate team record + completed_games = [g for g in schedule if g['result'] is not None] + wins = len([g for g in completed_games if g['result'] == 'W']) + losses = len([g for g in completed_games if g['result'] == 'L']) + + response_data = { + 'team_id': team_id, + 'team_name': team_info['name'], + 'schedule': 
schedule, + 'record': { + 'wins': wins, + 'losses': losses, + 'games_played': len(completed_games), + 'win_percentage': round((wins / len(completed_games)) * 100, 1) if completed_games else 0 + }, + 'filters': { + 'season': season, + 'status': status + } + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching team schedule: {e}') + return make_response(jsonify({"error": "Failed to fetch team schedule"}), 500) + + +@basketball.route('/games/', methods=['DELETE']) +def delete_game(game_id): + """ + Delete a game (admin function). + This will cascade delete all related player stats. + + User Stories: [Mike-2.3] (Data cleanup) + """ + try: + current_app.logger.info(f'DELETE /basketball/games/{game_id} - Deleting game') + + cursor = db.get_db().cursor() + + # Verify game exists + cursor.execute('SELECT game_id FROM Game WHERE game_id = %s', (game_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Game not found"}), 404) + + # Delete the game (cascades to PlayerGameStats) + cursor.execute('DELETE FROM Game WHERE game_id = %s', (game_id,)) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Game deleted successfully", + "game_id": game_id + }), 200) + + except Exception as e: + current_app.logger.error(f'Error deleting game: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to delete game"}), 500) \ No newline at end of file diff --git a/api/backend/customers/customer_routes.py b/api/backend/customers/customer_routes.py deleted file mode 100644 index 4fda460220..0000000000 --- a/api/backend/customers/customer_routes.py +++ /dev/null @@ -1,83 +0,0 @@ -######################################################## -# Sample customers blueprint of endpoints -# Remove this file if you are not using it in your project -######################################################## -from flask import Blueprint -from flask import request -from 
flask import jsonify -from flask import make_response -from flask import current_app -from backend.db_connection import db -from backend.ml_models.model01 import predict - -#------------------------------------------------------------ -# Create a new Blueprint object, which is a collection of -# routes. -customers = Blueprint('customers', __name__) - - -#------------------------------------------------------------ -# Get all customers from the system -@customers.route('/customers', methods=['GET']) -def get_customers(): - - cursor = db.get_db().cursor() - cursor.execute('''SELECT id, company, last_name, - first_name, job_title, business_phone FROM customers - ''') - - theData = cursor.fetchall() - - the_response = make_response(jsonify(theData)) - the_response.status_code = 200 - return the_response - -#------------------------------------------------------------ -# Update customer info for customer with particular userID -# Notice the manner of constructing the query. -@customers.route('/customers', methods=['PUT']) -def update_customer(): - current_app.logger.info('PUT /customers route') - cust_info = request.json - cust_id = cust_info['id'] - first = cust_info['first_name'] - last = cust_info['last_name'] - company = cust_info['company'] - - query = 'UPDATE customers SET first_name = %s, last_name = %s, company = %s where id = %s' - data = (first, last, company, cust_id) - cursor = db.get_db().cursor() - r = cursor.execute(query, data) - db.get_db().commit() - return 'customer updated!' - -#------------------------------------------------------------ -# Get customer detail for customer with particular userID -# Notice the manner of constructing the query. 
-@customers.route('/customers/', methods=['GET']) -def get_customer(userID): - current_app.logger.info('GET /customers/ route') - cursor = db.get_db().cursor() - cursor.execute('SELECT id, first_name, last_name FROM customers WHERE id = {0}'.format(userID)) - - theData = cursor.fetchall() - - the_response = make_response(jsonify(theData)) - the_response.status_code = 200 - return the_response - -#------------------------------------------------------------ -# Makes use of the very simple ML model in to predict a value -# and returns it to the user -@customers.route('/prediction//', methods=['GET']) -def predict_value(var01, var02): - current_app.logger.info(f'var01 = {var01}') - current_app.logger.info(f'var02 = {var02}') - - returnVal = predict(var01, var02) - return_dict = {'result': returnVal} - - the_response = make_response(jsonify(return_dict)) - the_response.status_code = 200 - the_response.mimetype = 'application/json' - return the_response \ No newline at end of file diff --git a/api/backend/db_connection/__init__.py b/api/backend/db_connection/__init__.py index fe568586a1..735b801608 100644 --- a/api/backend/db_connection/__init__.py +++ b/api/backend/db_connection/__init__.py @@ -4,7 +4,6 @@ from flaskext.mysql import MySQL from pymysql import cursors - # the parameter instructs the connection to return data # as a dictionary object. 
db = MySQL(cursorclass=cursors.DictCursor) \ No newline at end of file diff --git a/api/backend/games/games_routes.py b/api/backend/games/games_routes.py new file mode 100644 index 0000000000..82f1202edd --- /dev/null +++ b/api/backend/games/games_routes.py @@ -0,0 +1,428 @@ +######################################################## +# Games Blueprint +# Game scheduling, scores, and management +######################################################## +from flask import Blueprint, request, jsonify, make_response, current_app +from backend.db_connection import db +from datetime import datetime, timedelta + +games = Blueprint('games', __name__) + + +#------------------------------------------------------------ +# Get games list/schedule [Johnny-1.5, Marcus-3.6] +@games.route('/games', methods=['GET']) +def get_games(): + """ + Filters: + - team_id: int (home or away) + - start_date / end_date: YYYY-MM-DD (matches Game.date) + - season: string + - game_type: 'regular' | 'playoff' (maps to is_playoff 0/1) + - status: 'scheduled' | 'completed' (derived: scheduled => future/zero scores) + """ + try: + current_app.logger.info('GET /games handler') + + team_id = request.args.get('team_id', type=int) + start_date = request.args.get('start_date') + end_date = request.args.get('end_date') + season = request.args.get('season') + game_type = request.args.get('game_type') # regular | playoff + status = request.args.get('status') # scheduled | completed (derived) + + cursor = db.get_db().cursor() + + # Base query with only valid columns from your schema + query = ''' + SELECT + g.game_id, + g.date AS game_date, + g.season, + g.is_playoff, + CASE WHEN g.is_playoff = 1 THEN 'playoff' ELSE 'regular' END AS game_type, + g.home_team_id, + g.away_team_id, + ht.name AS home_team_name, + ht.abrv AS home_team_abrv, + at.name AS away_team_name, + at.abrv AS away_team_abrv, + g.home_score, + g.away_score, + -- Derive a simple status without a status column: + CASE + WHEN (g.date >= CURDATE() 
AND g.home_score = 0 AND g.away_score = 0) THEN 'scheduled' + ELSE 'completed' + END AS status, + CASE + WHEN g.home_score > g.away_score THEN ht.name + WHEN g.away_score > g.home_score THEN at.name + ELSE NULL + END AS winner + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE 1=1 + ''' + + params = [] + + if team_id: + query += ' AND (g.home_team_id = %s OR g.away_team_id = %s)' + params.extend([team_id, team_id]) + + if start_date: + query += ' AND g.date >= %s' + params.append(start_date) + + if end_date: + query += ' AND g.date <= %s' + params.append(end_date) + + if season: + query += ' AND g.season = %s' + params.append(season) + + if game_type: + is_playoff = 1 if game_type.lower() == 'playoff' else 0 + query += ' AND g.is_playoff = %s' + params.append(is_playoff) + + # Filter on derived status by wrapping as a subquery + if status: + query = f"SELECT * FROM ({query}) AS q WHERE q.status = %s" + params.append(status.lower()) + + query += ' ORDER BY game_date DESC, game_id DESC' + + current_app.logger.debug("SQL: %s | params: %s", query, params) + cursor.execute(query, params) + rows = cursor.fetchall() + + completed = [r for r in rows if r['status'] == 'completed'] + scheduled = [r for r in rows if r['status'] == 'scheduled'] + + return make_response(jsonify({ + 'games': rows, + 'total_games': len(rows), + 'completed_games': len(completed), + 'scheduled_games': len(scheduled), + 'filters_applied': { + 'team_id': team_id, + 'start_date': start_date, + 'end_date': end_date, + 'season': season, + 'game_type': game_type, + 'status': status + } + }), 200) + + except Exception as e: + import traceback + current_app.logger.error("Error fetching games: %s\n%s", e, traceback.format_exc()) + return make_response(jsonify({"error": "Failed to fetch games"}), 500) + + + +#------------------------------------------------------------ +# Create new game [Mike-2.1] +@games.route('/games', methods=['POST']) +def 
create_game(): + """ + Create a new game. + Expected JSON body: + { + "game_date": "YYYY-MM-DD" (required), + "game_time": "HH:MM:SS" (required), + "home_team_id": int (required), + "away_team_id": int (required), + "season": "string" (required), + "game_type": "regular|playoff" (default: "regular"), + "venue": "string", + "status": "scheduled|in_progress|completed" (default: "scheduled") + } + """ + try: + current_app.logger.info('POST /games handler') + + game_data = request.get_json() + + # Validate required fields + required_fields = ['game_date', 'game_time', 'home_team_id', 'away_team_id', 'season'] + for field in required_fields: + if field not in game_data: + return make_response(jsonify({"error": f"Missing required field: {field}"}), 400) + + # Validate teams exist and are different + if game_data['home_team_id'] == game_data['away_team_id']: + return make_response(jsonify({"error": "Home and away teams must be different"}), 400) + + cursor = db.get_db().cursor() + + # Check if both teams exist + cursor.execute('SELECT team_id FROM Teams WHERE team_id IN (%s, %s)', + (game_data['home_team_id'], game_data['away_team_id'])) + + if cursor.rowcount != 2: + return make_response(jsonify({"error": "One or both teams not found"}), 404) + + # Check for duplicate game + cursor.execute(''' + SELECT game_id FROM Game + WHERE game_date = %s + AND home_team_id = %s + AND away_team_id = %s + ''', (game_data['game_date'], game_data['home_team_id'], game_data['away_team_id'])) + + if cursor.fetchone(): + return make_response(jsonify({"error": "Game already exists for these teams on this date"}), 409) + + # Insert new game + query = ''' + INSERT INTO Game ( + game_date, game_time, home_team_id, away_team_id, + season, game_type, venue, status, home_score, away_score + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + ''' + + values = ( + game_data['game_date'], + game_data['game_time'], + game_data['home_team_id'], + game_data['away_team_id'], + game_data['season'], + 
game_data.get('game_type', 'regular'), + game_data.get('venue'), + game_data.get('status', 'scheduled'), + 0, # Initial home_score + 0 # Initial away_score + ) + + cursor.execute(query, values) + db.get_db().commit() + + new_game_id = cursor.lastrowid + + return make_response(jsonify({ + "message": "Game created successfully", + "game_id": new_game_id + }), 201) + + except Exception as e: + current_app.logger.error(f'Error creating game: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to create game"}), 500) + + +#------------------------------------------------------------ +# Update game info/scores [Mike-2.1] +@games.route('/games/', methods=['PUT']) +def update_game(game_id): + """ + Update game information and scores. + Expected JSON body (all fields optional): + { + "game_date": "YYYY-MM-DD", + "game_time": "HH:MM:SS", + "home_score": int, + "away_score": int, + "status": "scheduled|in_progress|completed", + "attendance": int, + "venue": "string" + } + """ + try: + current_app.logger.info(f'PUT /games/{game_id} handler') + + game_data = request.get_json() + + if not game_data: + return make_response(jsonify({"error": "No data provided for update"}), 400) + + cursor = db.get_db().cursor() + + # Check if game exists + cursor.execute('SELECT game_id FROM Game WHERE game_id = %s', (game_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Game not found"}), 404) + + # Build dynamic update query + update_fields = [] + values = [] + + allowed_fields = ['game_date', 'game_time', 'home_score', 'away_score', + 'status', 'attendance', 'venue'] + + for field in allowed_fields: + if field in game_data: + update_fields.append(f'{field} = %s') + values.append(game_data[field]) + + if not update_fields: + return make_response(jsonify({"error": "No valid fields to update"}), 400) + + # Add validation for scores + if 'home_score' in game_data and game_data['home_score'] < 0: + return make_response(jsonify({"error": "Home 
score cannot be negative"}), 400) + + if 'away_score' in game_data and game_data['away_score'] < 0: + return make_response(jsonify({"error": "Away score cannot be negative"}), 400) + + query = f"UPDATE Game SET {', '.join(update_fields)} WHERE game_id = %s" + values.append(game_id) + + cursor.execute(query, values) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Game updated successfully", + "game_id": game_id, + "updated_fields": list(game_data.keys()) + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating game: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to update game"}), 500) + + +#------------------------------------------------------------ +# Get specific game details +@games.route('/games/', methods=['GET']) +def get_game_details(game_id): + """ + Get detailed information for a specific game including player stats. + """ + try: + current_app.logger.info(f'GET /games/{game_id} handler') + + cursor = db.get_db().cursor() + + # Get game details + query = ''' + SELECT + g.*, + ht.name AS home_team_name, + ht.city AS home_team_city, + ht.coach AS home_team_coach, + at.name AS away_team_name, + at.city AS away_team_city, + at.coach AS away_team_coach + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE g.game_id = %s + ''' + + cursor.execute(query, (game_id,)) + game_data = cursor.fetchone() + + if not game_data: + return make_response(jsonify({"error": "Game not found"}), 404) + + # Get player stats for this game + cursor.execute(''' + SELECT + pgs.*, + p.first_name, + p.last_name, + p.position, + tp.team_id, + t.name AS team_name + FROM PlayerGameStats pgs + JOIN Players p ON pgs.player_id = p.player_id + JOIN TeamsPlayers tp ON p.player_id = tp.player_id AND tp.left_date IS NULL + JOIN Teams t ON tp.team_id = t.team_id + WHERE pgs.game_id = %s + ORDER BY tp.team_id, pgs.points DESC + ''', (game_id,)) + + 
player_stats = cursor.fetchall() + + # Separate stats by team + home_team_stats = [stat for stat in player_stats if stat['team_id'] == game_data['home_team_id']] + away_team_stats = [stat for stat in player_stats if stat['team_id'] == game_data['away_team_id']] + + response_data = { + 'game': game_data, + 'home_team_stats': home_team_stats, + 'away_team_stats': away_team_stats, + 'total_players': len(player_stats) + } + + the_response = make_response(jsonify(response_data)) + the_response.status_code = 200 + return the_response + + except Exception as e: + current_app.logger.error(f'Error fetching game details: {e}') + return make_response(jsonify({"error": "Failed to fetch game details"}), 500) + + +#------------------------------------------------------------ +# Get upcoming games schedule +@games.route('/games/upcoming', methods=['GET']) +def get_upcoming_games(): + """ + Get upcoming games for the next specified days. + Query parameters: + - days: number of days to look ahead (default: 7) + - team_id: filter by specific team + """ + try: + current_app.logger.info('GET /games/upcoming handler') + + days = request.args.get('days', 7, type=int) + team_id = request.args.get('team_id', type=int) + + cursor = db.get_db().cursor() + + # Calculate date range + today = datetime.now().date() + end_date = today + timedelta(days=days) + + query = ''' + SELECT + g.game_id, + g.game_date, + g.game_time, + g.home_team_id, + g.away_team_id, + ht.name AS home_team_name, + at.name AS away_team_name, + g.venue, + g.game_type + FROM Game g + JOIN Teams ht ON g.home_team_id = ht.team_id + JOIN Teams at ON g.away_team_id = at.team_id + WHERE g.game_date BETWEEN %s AND %s + AND g.status = 'scheduled' + ''' + + params = [today, end_date] + + if team_id: + query += ' AND (g.home_team_id = %s OR g.away_team_id = %s)' + params.extend([team_id, team_id]) + + query += ' ORDER BY g.game_date, g.game_time' + + cursor.execute(query, params) + theData = cursor.fetchall() + + response_data = { 
+ 'upcoming_games': theData, + 'date_range': { + 'start': str(today), + 'end': str(end_date) + }, + 'total_games': len(theData) + } + + the_response = make_response(jsonify(response_data)) + the_response.status_code = 200 + return the_response + + except Exception as e: + current_app.logger.error(f'Error fetching upcoming games: {e}') + return make_response(jsonify({"error": "Failed to fetch upcoming games"}), 500) \ No newline at end of file diff --git a/api/backend/ml_models/model01.py b/api/backend/ml_models/model01.py deleted file mode 100644 index 368152fbab..0000000000 --- a/api/backend/ml_models/model01.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -model01.py is an example of how to access model parameter values that you are storing -in the database and use them to make a prediction when a route associated with prediction is -accessed. -""" -from backend.db_connection import db -import numpy as np -import logging - - -def train(): - """ - You could have a function that performs training from scratch as well as testing (see below). - It could be activated from a route for an "administrator role" or something similar. 
- """ - return 'Training the model' - -def test(): - return 'Testing the model' - -def predict(var01, var02): - """ - Retreives model parameters from the database and uses them for real-time prediction - """ - # get a database cursor - cursor = db.get_db().cursor() - # get the model params from the database - query = 'SELECT beta_vals FROM model1_params ORDER BY sequence_number DESC LIMIT 1' - cursor.execute(query) - return_val = cursor.fetchone() - - params = return_val['beta_vals'] - logging.info(f'params = {params}') - logging.info(f'params datatype = {type(params)}') - - # turn the values from the database into a numpy array - params_array = np.array(list(map(float, params[1:-1].split(',')))) - logging.info(f'params array = {params_array}') - logging.info(f'params_array datatype = {type(params_array)}') - - # turn the variables sent from the UI into a numpy array - input_array = np.array([1.0, float(var01), float(var02)]) - - # calculate the dot product (since this is a fake regression) - prediction = np.dot(params_array, input_array) - - return prediction - diff --git a/api/backend/products/products_routes.py b/api/backend/products/products_routes.py deleted file mode 100644 index a3e596d0d3..0000000000 --- a/api/backend/products/products_routes.py +++ /dev/null @@ -1,208 +0,0 @@ -######################################################## -# Sample customers blueprint of endpoints -# Remove this file if you are not using it in your project -######################################################## - -from flask import Blueprint -from flask import request -from flask import jsonify -from flask import make_response -from flask import current_app -from backend.db_connection import db - -#------------------------------------------------------------ -# Create a new Blueprint object, which is a collection of -# routes. 
-products = Blueprint('products', __name__) - -#------------------------------------------------------------ -# Get all the products from the database, package them up, -# and return them to the client -@products.route('/products', methods=['GET']) -def get_products(): - query = ''' - SELECT id, - product_code, - product_name, - list_price, - category - FROM products - ''' - - # get a cursor object from the database - cursor = db.get_db().cursor() - - # use cursor to query the database for a list of products - cursor.execute(query) - - # fetch all the data from the cursor - # The cursor will return the data as a - # Python Dictionary - theData = cursor.fetchall() - - # Create a HTTP Response object and add results of the query to it - # after "jasonify"-ing it. - response = make_response(jsonify(theData)) - # set the proper HTTP Status code of 200 (meaning all good) - response.status_code = 200 - # send the response back to the client - return response - -# ------------------------------------------------------------ -# get product information about a specific product -# notice that the route takes and then you see id -# as a parameter to the function. This is one way to send -# parameterized information into the route handler. -@products.route('/product/', methods=['GET']) -def get_product_detail (id): - - query = f'''SELECT id, - product_name, - description, - list_price, - category - FROM products - WHERE id = {str(id)} - ''' - - # logging the query for debugging purposes. - # The output will appear in the Docker logs output - # This line has nothing to do with actually executing the query... - # It is only for debugging purposes. - current_app.logger.info(f'GET /product/ query={query}') - - # get the database connection, execute the query, and - # fetch the results as a Python Dictionary - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - # Another example of logging for debugging purposes. 
- # You can see if the data you're getting back is what you expect. - current_app.logger.info(f'GET /product/ Result of query = {theData}') - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# Get the top 5 most expensive products from the database -@products.route('/mostExpensive') -def get_most_pop_products(): - - query = ''' - SELECT product_code, - product_name, - list_price, - reorder_level - FROM products - ORDER BY list_price DESC - LIMIT 5 - ''' - - # Same process as handler above - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# Route to get the 10 most expensive items from the -# database. -@products.route('/tenMostExpensive', methods=['GET']) -def get_10_most_expensive_products(): - - query = ''' - SELECT product_code, - product_name, - list_price, - reorder_level - FROM products - ORDER BY list_price DESC - LIMIT 10 - ''' - - # Same process as above - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - - -# ------------------------------------------------------------ -# This is a POST route to add a new product. -# Remember, we are using POST routes to create new entries -# in the database. 
-@products.route('/product', methods=['POST']) -def add_new_product(): - - # In a POST request, there is a - # collecting data from the request object - the_data = request.json - current_app.logger.info(the_data) - - #extracting the variable - name = the_data['product_name'] - description = the_data['product_description'] - price = the_data['product_price'] - category = the_data['product_category'] - - query = f''' - INSERT INTO products (product_name, - description, - category, - list_price) - VALUES ('{name}', '{description}', '{category}', {str(price)}) - ''' - # TODO: Make sure the version of the query above works properly - # Constructing the query - # query = 'insert into products (product_name, description, category, list_price) values ("' - # query += name + '", "' - # query += description + '", "' - # query += category + '", ' - # query += str(price) + ')' - current_app.logger.info(query) - - # executing and committing the insert statement - cursor = db.get_db().cursor() - cursor.execute(query) - db.get_db().commit() - - response = make_response("Successfully added product") - response.status_code = 200 - return response - -# ------------------------------------------------------------ -### Get all product categories -@products.route('/categories', methods = ['GET']) -def get_all_categories(): - query = ''' - SELECT DISTINCT category AS label, category as value - FROM products - WHERE category IS NOT NULL - ORDER BY category - ''' - - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# This is a stubbed route to update a product in the catalog -# The SQL query would be an UPDATE. 
-@products.route('/product', methods = ['PUT']) -def update_product(): - product_info = request.json - current_app.logger.info(product_info) - - return "Success" \ No newline at end of file diff --git a/api/backend/rest_entry.py b/api/backend/rest_entry.py index d8d78502d9..3c3b1fefe6 100644 --- a/api/backend/rest_entry.py +++ b/api/backend/rest_entry.py @@ -1,48 +1,173 @@ -from flask import Flask +""" +BallWatch Basketball Analytics Platform +======================================= +CS 3200 - Summer 2 2025 +Team: StatPadders + +Flask REST API Entry Point +Provides comprehensive basketball analytics for teams and fans through +data-driven insights, player statistics, and strategic analysis. + +Author: StatPadders Team +""" -from backend.db_connection import db -from backend.customers.customer_routes import customers -from backend.products.products_routes import products -from backend.simple.simple_routes import simple_routes import os +from flask import Flask from dotenv import load_dotenv +# Database connection +from backend.db_connection import db + +# Basketball Analytics API Route Blueprints +from backend.core.basketball_routes import basketball +from backend.analytics.analytics_routes import analytics +from backend.strategy.strategy_routes import strategy +from backend.admin.admin_routes import admin + + def create_app(): + """ + Create and configure the BallWatch Flask application. + + Returns: + Flask: Configured Flask application instance + """ app = Flask(__name__) - - # Load environment variables - # This function reads all the values from inside - # the .env file (in the parent folder) so they - # are available in this file. See the MySQL setup - # commands below to see how they're being used. 
+ + # Load environment configuration load_dotenv() + _configure_app(app) + + # Initialize database connection + _initialize_database(app) + + # Register API blueprints + _register_blueprints(app) + + # Log application setup completion + _log_startup_info(app) + + return app + - # secret key that will be used for securely signing the session - # cookie and can be used for any other security related needs by - # extensions or your application - # app.config['SECRET_KEY'] = 'someCrazyS3cR3T!Key.!' +def _configure_app(app): + """Configure Flask application settings.""" app.config['SECRET_KEY'] = os.getenv('SECRET_KEY') + + # MySQL Database Configuration + app.config['MYSQL_DATABASE_USER'] = os.getenv('DB_USER', '').strip() + app.config['MYSQL_DATABASE_PASSWORD'] = os.getenv('MYSQL_ROOT_PASSWORD', '').strip() + app.config['MYSQL_DATABASE_HOST'] = os.getenv('DB_HOST', 'localhost').strip() + app.config['MYSQL_DATABASE_PORT'] = int(os.getenv('DB_PORT', '3306').strip()) + app.config['MYSQL_DATABASE_DB'] = os.getenv('DB_NAME', 'BallWatch').strip() - # # these are for the DB object to be able to connect to MySQL. - # app.config['MYSQL_DATABASE_USER'] = 'root' - app.config['MYSQL_DATABASE_USER'] = os.getenv('DB_USER').strip() - app.config['MYSQL_DATABASE_PASSWORD'] = os.getenv('MYSQL_ROOT_PASSWORD').strip() - app.config['MYSQL_DATABASE_HOST'] = os.getenv('DB_HOST').strip() - app.config['MYSQL_DATABASE_PORT'] = int(os.getenv('DB_PORT').strip()) - app.config['MYSQL_DATABASE_DB'] = os.getenv('DB_NAME').strip() # Change this to your DB name - # Initialize the database object with the settings above. 
- app.logger.info('current_app(): starting the database connection') - db.init_app(app) +def _initialize_database(app): + """Initialize database connection with the Flask app.""" + app.logger.info('🏀 Initializing BallWatch database connection...') + try: + db.init_app(app) + app.logger.info('✅ Database connection established successfully') + except Exception as e: + app.logger.error(f'❌ Database connection failed: {e}') + raise - # Register the routes from each Blueprint with the app object - # and give a url prefix to each - app.logger.info('current_app(): registering blueprints with Flask app object.') - app.register_blueprint(simple_routes) - app.register_blueprint(customers, url_prefix='/c') - app.register_blueprint(products, url_prefix='/p') +def _register_blueprints(app): + """Register all API blueprints with the Flask application.""" + app.logger.info('📋 Registering BallWatch API blueprints...') + + # Core Basketball Analytics Routes with specific prefixes + blueprints = [ + (basketball, '/basketball', 'Core Basketball Operations (Players, Teams, Games)'), + (analytics, '/analytics', 'Performance Analytics & Comparisons'), + (strategy, '/strategy', 'Game Plans & Draft Evaluations'), + (admin, '/system', 'System Administration & Data Management') + ] + + for blueprint, prefix, description in blueprints: + app.register_blueprint(blueprint, url_prefix=prefix) + app.logger.info(f' ✓ {blueprint.name} ({prefix}): {description}') + + +def _log_startup_info(app): + """Log application startup information and available endpoints.""" + app.logger.info('🎯 BallWatch API routes registered successfully!') + app.logger.info('=' * 60) + app.logger.info('📊 BALLWATCH BASKETBALL ANALYTICS API') + app.logger.info('=' * 60) + + # Core Functionality Routes + app.logger.info('🏀 CORE BASKETBALL ROUTES:') + app.logger.info(' 📋 Players:') + app.logger.info(' GET /basketball/players - Get all players with filters') + app.logger.info(' POST /basketball/players - Add new player profile') 
+ app.logger.info(' PUT /basketball/players/{id} - Update player information') + app.logger.info(' GET /basketball/players/{id}/stats - Get player performance stats') + app.logger.info(' PUT /basketball/players/{id}/stats - Update player game statistics') + + app.logger.info(' 🏟️ Teams:') + app.logger.info(' GET /basketball/teams - Get all teams with filters') + app.logger.info(' GET /basketball/teams/{id} - Get specific team details') + app.logger.info(' PUT /basketball/teams/{id} - Update team information') + app.logger.info(' GET /basketball/teams/{id}/players - Get team roster') + app.logger.info(' POST /basketball/teams/{id}/players - Add player to roster') + app.logger.info(' PUT /basketball/teams/{id}/players/{pid} - Update player status') + + app.logger.info(' 🏆 Games:') + app.logger.info(' GET /basketball/games - Get games schedule/results') + app.logger.info(' POST /basketball/games - Create new game') + app.logger.info(' GET /basketball/games/{id} - Get game details & stats') + app.logger.info(' PUT /basketball/games/{id} - Update game scores/info') + app.logger.info(' GET /basketball/games/upcoming - Get upcoming games') + + # Analytics & Intelligence Routes + app.logger.info('📈 ANALYTICS & INSIGHTS:') + app.logger.info(' 🔬 Performance Analytics:') + app.logger.info(' GET /analytics/player-comparisons - Side-by-side player analysis') + app.logger.info(' GET /analytics/player-matchups - Head-to-head matchup data') + app.logger.info(' GET /analytics/opponent-reports - Opponent scouting reports') + app.logger.info(' GET /analytics/lineup-configurations- Lineup effectiveness analysis') + app.logger.info(' GET /analytics/season-summaries - Season performance summaries') + + # Strategic Planning Routes + app.logger.info(' 🎯 Strategic Planning:') + app.logger.info(' GET /strategy/game-plans - Get strategic game plans') + app.logger.info(' POST /strategy/game-plans - Create new game plan') + app.logger.info(' PUT /strategy/game-plans/{id} - Update game plan') + 
app.logger.info(' GET /strategy/draft-evaluations - Get player draft rankings') + app.logger.info(' POST /strategy/draft-evaluations - Add player evaluation') + app.logger.info(' PUT /strategy/draft-evaluations/{id} - Update player rankings') + + # System Administration Routes + app.logger.info('⚙️ SYSTEM ADMINISTRATION:') + app.logger.info(' 🩺 System Health:') + app.logger.info(' GET /system/health - Get system status & metrics') + app.logger.info(' GET /system/data-loads - Get data load history') + app.logger.info(' POST /system/data-loads - Start new data load') + app.logger.info(' PUT /system/data-loads/{id} - Update load status') + + app.logger.info(' 🔍 Error Management:') + app.logger.info(' GET /system/error-logs - Get error log history') + app.logger.info(' POST /system/error-logs - Log new system error') + app.logger.info(' GET /system/data-errors - Get data validation errors') + app.logger.info(' GET /system/data-cleanup - Get cleanup schedules') + app.logger.info(' POST /system/data-cleanup - Schedule data cleanup') + app.logger.info(' GET /system/data-validation - Get validation reports') + app.logger.info(' POST /system/data-validation - Run validation checks') + + app.logger.info('=' * 60) + app.logger.info('🚀 BallWatch API Server Ready!') + app.logger.info(' 💡 Transforming NBA analytics into actionable insights') + app.logger.info(' 👥 Serving: Superfans | Data Engineers | Coaches | GMs') + app.logger.info('=' * 60) - # Don't forget to return the app object - return app +if __name__ == '__main__': + # Create and run the application + application = create_app() + application.run( + host='0.0.0.0', + port=int(os.getenv('FLASK_PORT', 4000)), + debug=os.getenv('FLASK_ENV') == 'development' + ) \ No newline at end of file diff --git a/api/backend/simple/playlist.py b/api/backend/simple/playlist.py deleted file mode 100644 index a9e7a9ef03..0000000000 --- a/api/backend/simple/playlist.py +++ /dev/null @@ -1,129 +0,0 @@ -# 
------------------------------------------------------------ -# Sample data for testing generated by ChatGPT -# ------------------------------------------------------------ - -sample_playlist_data = { - "playlist": { - "id": "37i9dQZF1DXcBWIGoYBM5M", - "name": "Chill Hits", - "description": "Relax and unwind with the latest chill hits.", - "owner": { - "id": "spotify_user_123", - "display_name": "Spotify User" - }, - "tracks": { - "items": [ - { - "track": { - "id": "3n3Ppam7vgaVa1iaRUc9Lp", - "name": "Lose Yourself", - "artists": [ - { - "id": "1dfeR4HaWDbWqFHLkxsg1d", - "name": "Eminem" - } - ], - "album": { - "id": "1ATL5GLyefJaxhQzSPVrLX", - "name": "8 Mile" - }, - "duration_ms": 326000, - "track_number": 1, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/lose-yourself.mp3", - "uri": "spotify:track:3n3Ppam7vgaVa1iaRUc9Lp" - } - }, - { - "track": { - "id": "7ouMYWpwJ422jRcDASZB7P", - "name": "Blinding Lights", - "artists": [ - { - "id": "0fW8E0XdT6aG9aFh6jGpYo", - "name": "The Weeknd" - } - ], - "album": { - "id": "1ATL5GLyefJaxhQzSPVrLX", - "name": "After Hours" - }, - "duration_ms": 200040, - "track_number": 9, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/blinding-lights.mp3", - "uri": "spotify:track:7ouMYWpwJ422jRcDASZB7P" - } - }, - { - "track": { - "id": "4uLU6hMCjMI75M1A2tKUQC", - "name": "Shape of You", - "artists": [ - { - "id": "6eUKZXaKkcviH0Ku9w2n3V", - "name": "Ed Sheeran" - } - ], - "album": { - "id": "3fMbdgg4jU18AjLCKBhRSm", - "name": "Divide" - }, - "duration_ms": 233713, - "track_number": 4, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/shape-of-you.mp3", - "uri": "spotify:track:4uLU6hMCjMI75M1A2tKUQC" - } - }, - { - "track": { - "id": "0VjIjW4GlUZAMYd2vXMi3b", - "name": "Levitating", - "artists": [ - { - "id": "4tZwfgrHOc3mvqYlEYSvVi", - "name": "Dua Lipa" - } - ], - "album": { - "id": "7dGJo4pcD2V6oG8kP0tJRR", - "name": "Future Nostalgia" - }, - "duration_ms": 203693, - 
"track_number": 5, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/levitating.mp3", - "uri": "spotify:track:0VjIjW4GlUZAMYd2vXMi3b" - } - }, - { - "track": { - "id": "6habFhsOp2NvshLv26DqMb", - "name": "Sunflower", - "artists": [ - { - "id": "1dfeR4HaWDbWqFHLkxsg1d", - "name": "Post Malone" - }, - { - "id": "0C8ZW7ezQVs4URX5aX7Kqx", - "name": "Swae Lee" - } - ], - "album": { - "id": "6k3hyp4efgfHP5GMVd3Agw", - "name": "Spider-Man: Into the Spider-Verse (Soundtrack)" - }, - "duration_ms": 158000, - "track_number": 3, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/sunflower.mp3", - "uri": "spotify:track:6habFhsOp2NvshLv26DqMb" - } - } - ] - }, - "uri": "spotify:playlist:37i9dQZF1DXcBWIGoYBM5M" - } -} \ No newline at end of file diff --git a/api/backend/simple/simple_routes.py b/api/backend/simple/simple_routes.py deleted file mode 100644 index 8685fbac76..0000000000 --- a/api/backend/simple/simple_routes.py +++ /dev/null @@ -1,48 +0,0 @@ -from flask import Blueprint, request, jsonify, make_response, current_app, redirect, url_for -import json -from backend.db_connection import db -from backend.simple.playlist import sample_playlist_data - -# This blueprint handles some basic routes that you can use for testing -simple_routes = Blueprint('simple_routes', __name__) - - -# ------------------------------------------------------------ -# / is the most basic route -# Once the api container is started, in a browser, go to -# localhost:4000/playlist -@simple_routes.route('/') -def welcome(): - current_app.logger.info('GET / handler') - welcome_message = '

Welcome to the CS 3200 Project Template REST API' - response = make_response(welcome_message) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# /playlist returns the sample playlist data contained in playlist.py -# (imported above) -@simple_routes.route('/playlist') -def get_playlist_data(): - current_app.logger.info('GET /playlist handler') - response = make_response(jsonify(sample_playlist_data)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -@simple_routes.route('/niceMesage', methods = ['GET']) -def affirmation(): - message = ''' -

Think about it...

-
- You only need to be 1% better today than you were yesterday! - ''' - response = make_response(message) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# Demonstrates how to redirect from one route to another. -@simple_routes.route('/message') -def mesage(): - return redirect(url_for(affirmation)) \ No newline at end of file diff --git a/api/backend/strategy/strategy_routes.py b/api/backend/strategy/strategy_routes.py new file mode 100644 index 0000000000..8f8ee06a26 --- /dev/null +++ b/api/backend/strategy/strategy_routes.py @@ -0,0 +1,708 @@ +""" +BallWatch Basketball Analytics Platform +======================================= +Strategic Planning Blueprint + +Game plans, draft evaluations, and strategic planning tools +for coaches and general managers making data-driven decisions. + +Author: StatPadders Team +Course: CS 3200 - Summer 2 2025 +""" + +from flask import Blueprint, request, jsonify, make_response, current_app +from backend.db_connection import db +from datetime import datetime + +# Create the Strategy Blueprint +strategy = Blueprint('strategy', __name__) + + +# ============================================================================ +# GAME PLANNING & STRATEGY ROUTES +# ============================================================================ + +@strategy.route('/game-plans', methods=['GET']) +def get_game_plans(): + """ + Get game strategies and tactical plans. 
+ + Query Parameters: + team_id: Team ID (required) + opponent_id: Opponent team ID (optional) + game_id: Specific game ID (optional) + status: Plan status ('draft', 'active', 'archived') + + User Stories: [Marcus-3.5] + """ + try: + current_app.logger.info('GET /strategy/game-plans - Fetching strategic game plans') + + team_id = request.args.get('team_id', type=int) + opponent_id = request.args.get('opponent_id', type=int) + game_id = request.args.get('game_id', type=int) + status = request.args.get('status') + + if not team_id: + return make_response(jsonify({"error": "team_id is required"}), 400) + + cursor = db.get_db().cursor() + + # Build comprehensive game plans query + query = ''' + SELECT + gp.plan_id, + gp.team_id, + gp.opponent_id, + gp.game_id, + gp.plan_name, + gp.offensive_strategy, + gp.defensive_strategy, + gp.key_matchups, + gp.special_instructions, + gp.status, + gp.created_date, + gp.updated_date, + t.name AS team_name, + opp.name AS opponent_name, + g.game_date, + g.game_time + FROM GamePlans gp + LEFT JOIN Teams t ON gp.team_id = t.team_id + LEFT JOIN Teams opp ON gp.opponent_id = opp.team_id + LEFT JOIN Game g ON gp.game_id = g.game_id + WHERE gp.team_id = %s + ''' + + params = [team_id] + + # Apply optional filters + if opponent_id: + query += ' AND gp.opponent_id = %s' + params.append(opponent_id) + if game_id: + query += ' AND gp.game_id = %s' + params.append(game_id) + if status: + query += ' AND gp.status = %s' + params.append(status) + + query += ' ORDER BY gp.created_date DESC' + + cursor.execute(query, params) + game_plans = cursor.fetchall() + + response_data = { + 'game_plans': game_plans, + 'total_plans': len(game_plans), + 'filters': { + 'team_id': team_id, + 'opponent_id': opponent_id, + 'game_id': game_id, + 'status': status + } + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error fetching game plans: {e}') + return make_response(jsonify({"error": "Failed to fetch 
game plans"}), 500) + + +@strategy.route('/game-plans', methods=['POST']) +def create_game_plan(): + """ + Create a new strategic game plan. + + Expected JSON Body: + { + "team_id": int (required), + "opponent_id": int, + "game_id": int, + "plan_name": "string" (required), + "offensive_strategy": "string", + "defensive_strategy": "string", + "key_matchups": "string", + "special_instructions": "string", + "status": "draft|active|archived" (default: "draft") + } + + User Stories: [Marcus-3.5] + """ + try: + current_app.logger.info('POST /strategy/game-plans - Creating new game plan') + + plan_data = request.get_json() + + # Validate required fields + required_fields = ['team_id', 'plan_name'] + for field in required_fields: + if field not in plan_data: + return make_response(jsonify({"error": f"Missing required field: {field}"}), 400) + + # Validate status if provided + valid_statuses = ['draft', 'active', 'archived'] + if 'status' in plan_data and plan_data['status'] not in valid_statuses: + return make_response(jsonify({ + "error": f"Invalid status. 
Must be one of: {valid_statuses}" + }), 400) + + cursor = db.get_db().cursor() + + # Verify team exists + cursor.execute('SELECT team_id FROM Teams WHERE team_id = %s', (plan_data['team_id'],)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Team not found"}), 404) + + # Insert new strategic game plan + query = ''' + INSERT INTO GamePlans ( + team_id, opponent_id, game_id, plan_name, + offensive_strategy, defensive_strategy, key_matchups, + special_instructions, status, created_date, updated_date + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW()) + ''' + + values = ( + plan_data['team_id'], + plan_data.get('opponent_id'), + plan_data.get('game_id'), + plan_data['plan_name'], + plan_data.get('offensive_strategy'), + plan_data.get('defensive_strategy'), + plan_data.get('key_matchups'), + plan_data.get('special_instructions'), + plan_data.get('status', 'draft') + ) + + cursor.execute(query, values) + db.get_db().commit() + + new_plan_id = cursor.lastrowid + + return make_response(jsonify({ + "message": "Game plan created successfully", + "plan_id": new_plan_id, + "plan_name": plan_data['plan_name'], + "status": plan_data.get('status', 'draft') + }), 201) + + except Exception as e: + current_app.logger.error(f'Error creating game plan: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to create game plan"}), 500) + + +@strategy.route('/game-plans/', methods=['PUT']) +def update_game_plan(plan_id): + """ + Update an existing strategic game plan. 
+ + Expected JSON Body (all fields optional): + { + "plan_name": "string", + "offensive_strategy": "string", + "defensive_strategy": "string", + "key_matchups": "string", + "special_instructions": "string", + "status": "draft|active|archived" + } + + User Stories: [Marcus-3.5] + """ + try: + current_app.logger.info(f'PUT /strategy/game-plans/{plan_id} - Updating game plan') + + plan_data = request.get_json() + + if not plan_data: + return make_response(jsonify({"error": "No data provided for update"}), 400) + + # Validate status if provided + if 'status' in plan_data: + valid_statuses = ['draft', 'active', 'archived'] + if plan_data['status'] not in valid_statuses: + return make_response(jsonify({ + "error": f"Invalid status. Must be one of: {valid_statuses}" + }), 400) + + cursor = db.get_db().cursor() + + # Verify game plan exists + cursor.execute('SELECT plan_id FROM GamePlans WHERE plan_id = %s', (plan_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Game plan not found"}), 404) + + # Build dynamic update query + update_fields = [] + values = [] + + allowed_fields = ['plan_name', 'offensive_strategy', 'defensive_strategy', + 'key_matchups', 'special_instructions', 'status'] + + for field in allowed_fields: + if field in plan_data: + update_fields.append(f'{field} = %s') + values.append(plan_data[field]) + + if not update_fields: + return make_response(jsonify({"error": "No valid fields to update"}), 400) + + # Always update the timestamp + update_fields.append('updated_date = NOW()') + + query = f"UPDATE GamePlans SET {', '.join(update_fields)} WHERE plan_id = %s" + values.append(plan_id) + + cursor.execute(query, values) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Game plan updated successfully", + "plan_id": plan_id, + "updated_fields": list(plan_data.keys()) + }), 200) + + except Exception as e: + current_app.logger.error(f'Error updating game plan: {e}') + db.get_db().rollback() + return 
@strategy.route('/draft-evaluations', methods=['GET'])
def get_draft_evaluations():
    """
    Get player rankings and comprehensive draft evaluations.

    Query Parameters:
        position: Filter by position
        min_age: Minimum age filter
        max_age: Maximum age filter
        college: Filter by college
        evaluation_type: 'prospect', 'free_agent', 'trade_target'

    Returns:
        200 with evaluations plus the echo of applied filters,
        500 on database failure.

    User Stories: [Andre-4.5]
    """
    try:
        current_app.logger.info('GET /strategy/draft-evaluations - Fetching draft evaluations')

        position = request.args.get('position')
        min_age = request.args.get('min_age', type=int)
        max_age = request.args.get('max_age', type=int)
        college = request.args.get('college')
        evaluation_type = request.args.get('evaluation_type')

        cursor = db.get_db().cursor()

        # Base query: evaluations joined with player bio data, current team
        # (active roster rows only), and per-game stat averages.
        sql = '''
            SELECT
                de.evaluation_id,
                de.player_id,
                p.first_name,
                p.last_name,
                p.position,
                p.age,
                p.college,
                p.height,
                p.weight,
                de.overall_rating,
                de.offensive_rating,
                de.defensive_rating,
                de.athleticism_rating,
                de.potential_rating,
                de.evaluation_type,
                de.strengths,
                de.weaknesses,
                de.scout_notes,
                de.projected_round,
                de.comparison_player,
                de.last_updated,
                t.name AS current_team,
                p.expected_salary,
                p.current_salary,
                COUNT(pgs.game_id) AS games_played,
                ROUND(AVG(pgs.points), 1) AS avg_points,
                ROUND(AVG(pgs.rebounds), 1) AS avg_rebounds,
                ROUND(AVG(pgs.assists), 1) AS avg_assists
            FROM DraftEvaluations de
            JOIN Players p ON de.player_id = p.player_id
            LEFT JOIN TeamsPlayers tp ON p.player_id = tp.player_id AND tp.left_date IS NULL
            LEFT JOIN Teams t ON tp.team_id = t.team_id
            LEFT JOIN PlayerGameStats pgs ON p.player_id = pgs.player_id
            WHERE 1=1
        '''

        # Collect optional filter clauses and their bind parameters in lockstep.
        clauses = []
        params = []
        if position:
            clauses.append(' AND p.position = %s')
            params.append(position)
        if min_age is not None:
            clauses.append(' AND p.age >= %s')
            params.append(min_age)
        if max_age is not None:
            clauses.append(' AND p.age <= %s')
            params.append(max_age)
        if college:
            clauses.append(' AND p.college = %s')
            params.append(college)
        if evaluation_type:
            clauses.append(' AND de.evaluation_type = %s')
            params.append(evaluation_type)

        tail = '''
            GROUP BY de.evaluation_id, de.player_id, p.first_name, p.last_name,
                     p.position, p.age, p.college, p.height, p.weight,
                     de.overall_rating, de.offensive_rating, de.defensive_rating,
                     de.athleticism_rating, de.potential_rating, de.evaluation_type,
                     de.strengths, de.weaknesses, de.scout_notes, de.projected_round,
                     de.comparison_player, de.last_updated, t.name, p.expected_salary, p.current_salary
            ORDER BY de.overall_rating DESC
        '''

        cursor.execute(sql + ''.join(clauses) + tail, params)
        rows = cursor.fetchall()

        payload = {
            'evaluations': rows,
            'total_evaluations': len(rows),
            'filters': {
                'position': position,
                'age_range': f"{min_age}-{max_age}" if min_age or max_age else None,
                'college': college,
                'evaluation_type': evaluation_type
            }
        }

        return make_response(jsonify(payload), 200)

    except Exception as e:
        current_app.logger.error(f'Error fetching draft evaluations: {e}')
        return make_response(jsonify({"error": "Failed to fetch draft evaluations"}), 500)
@strategy.route('/draft-evaluations', methods=['POST'])
def add_draft_evaluation():
    """
    Add a new player evaluation for draft/scouting purposes.

    Expected JSON Body:
        player_id (int, required), overall_rating (float, required, 0-100),
        offensive_rating, defensive_rating, athleticism_rating,
        potential_rating (floats), evaluation_type
        ('prospect' | 'free_agent' | 'trade_target'), strengths, weaknesses,
        scout_notes (strings), projected_round (int), comparison_player (string)

    Returns:
        201 on success, 400 on bad/missing input, 404 if the player does not
        exist, 409 if an evaluation already exists for the player,
        500 on database failure.

    User Stories: [Andre-4.5]
    """
    try:
        current_app.logger.info('POST /strategy/draft-evaluations - Adding player evaluation')

        eval_data = request.get_json()

        # Guard against a missing/empty JSON body. Previously a bodyless POST
        # raised a TypeError on the membership test below and surfaced as a
        # 500 instead of a 400 (the PUT routes already guard this way).
        if not eval_data:
            return make_response(jsonify({"error": "No data provided"}), 400)

        # Validate required fields
        required_fields = ['player_id', 'overall_rating']
        for field in required_fields:
            if field not in eval_data:
                return make_response(jsonify({"error": f"Missing required field: {field}"}), 400)

        # Validate rating range
        if not 0 <= eval_data['overall_rating'] <= 100:
            return make_response(jsonify({
                "error": "overall_rating must be between 0 and 100"
            }), 400)

        # Validate evaluation type
        valid_types = ['prospect', 'free_agent', 'trade_target']
        if 'evaluation_type' in eval_data and eval_data['evaluation_type'] not in valid_types:
            return make_response(jsonify({
                "error": f"Invalid evaluation_type. Must be one of: {valid_types}"
            }), 400)

        cursor = db.get_db().cursor()

        # Verify player exists
        cursor.execute('SELECT player_id FROM Players WHERE player_id = %s', (eval_data['player_id'],))
        if not cursor.fetchone():
            return make_response(jsonify({"error": "Player not found"}), 404)

        # Enforce one evaluation per player; updates go through PUT
        cursor.execute('''
            SELECT evaluation_id FROM DraftEvaluations
            WHERE player_id = %s
        ''', (eval_data['player_id'],))

        if cursor.fetchone():
            return make_response(jsonify({
                "error": "Evaluation already exists for this player. Use PUT to update."
            }), 409)

        # Insert the new evaluation
        query = '''
            INSERT INTO DraftEvaluations (
                player_id, overall_rating, offensive_rating, defensive_rating,
                athleticism_rating, potential_rating, evaluation_type,
                strengths, weaknesses, scout_notes, projected_round,
                comparison_player, last_updated
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW())
        '''

        values = (
            eval_data['player_id'],
            eval_data['overall_rating'],
            eval_data.get('offensive_rating'),
            eval_data.get('defensive_rating'),
            eval_data.get('athleticism_rating'),
            eval_data.get('potential_rating'),
            eval_data.get('evaluation_type', 'prospect'),
            eval_data.get('strengths'),
            eval_data.get('weaknesses'),
            eval_data.get('scout_notes'),
            eval_data.get('projected_round'),
            eval_data.get('comparison_player')
        )

        cursor.execute(query, values)
        db.get_db().commit()

        new_eval_id = cursor.lastrowid

        return make_response(jsonify({
            "message": "Draft evaluation added successfully",
            "evaluation_id": new_eval_id,
            "player_id": eval_data['player_id'],
            "overall_rating": eval_data['overall_rating']
        }), 201)

    except Exception as e:
        current_app.logger.error(f'Error adding draft evaluation: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to add draft evaluation"}), 500)
@strategy.route('/draft-evaluations/<int:evaluation_id>', methods=['PUT'])
def update_draft_evaluation(evaluation_id):
    """
    Update existing player evaluation and rankings.

    Path Parameters:
        evaluation_id: ID of the evaluation to update (int URL converter —
            this was missing from the route rule, so Flask could never bind
            evaluation_id).

    Expected JSON Body (all fields optional):
        overall_rating, offensive_rating, defensive_rating,
        athleticism_rating, potential_rating (floats, 0-100),
        evaluation_type ('prospect' | 'free_agent' | 'trade_target'),
        strengths, weaknesses, scout_notes (strings),
        projected_round (int), comparison_player (string)

    Returns:
        200 with the list of updated fields, 400 on bad/missing input,
        404 if the evaluation does not exist, 500 on database failure.

    User Stories: [Andre-4.5]
    """
    try:
        current_app.logger.info(f'PUT /strategy/draft-evaluations/{evaluation_id} - Updating evaluation')

        eval_data = request.get_json()

        if not eval_data:
            return make_response(jsonify({"error": "No data provided for update"}), 400)

        # Validate rating ranges if provided
        rating_fields = ['overall_rating', 'offensive_rating', 'defensive_rating',
                         'athleticism_rating', 'potential_rating']

        for field in rating_fields:
            if field in eval_data:
                value = eval_data[field]
                # Reject non-numeric values explicitly: previously a string or
                # null here raised a TypeError in the comparison and surfaced
                # as a 500 instead of a 400.
                if not isinstance(value, (int, float)) or not 0 <= value <= 100:
                    return make_response(jsonify({
                        "error": f"{field} must be between 0 and 100"
                    }), 400)

        # Validate evaluation type
        if 'evaluation_type' in eval_data:
            valid_types = ['prospect', 'free_agent', 'trade_target']
            if eval_data['evaluation_type'] not in valid_types:
                return make_response(jsonify({
                    "error": f"Invalid evaluation_type. Must be one of: {valid_types}"
                }), 400)

        cursor = db.get_db().cursor()

        # Verify the evaluation exists before attempting the update
        cursor.execute('SELECT evaluation_id FROM DraftEvaluations WHERE evaluation_id = %s', (evaluation_id,))
        if not cursor.fetchone():
            return make_response(jsonify({"error": "Evaluation not found"}), 404)

        # Build a dynamic UPDATE from the whitelist of editable columns only
        update_fields = []
        values = []

        allowed_fields = ['overall_rating', 'offensive_rating', 'defensive_rating',
                          'athleticism_rating', 'potential_rating', 'evaluation_type',
                          'strengths', 'weaknesses', 'scout_notes', 'projected_round',
                          'comparison_player']

        for field in allowed_fields:
            if field in eval_data:
                update_fields.append(f'{field} = %s')
                values.append(eval_data[field])

        if not update_fields:
            return make_response(jsonify({"error": "No valid fields to update"}), 400)

        # Always refresh the modification timestamp
        update_fields.append('last_updated = NOW()')

        query = f"UPDATE DraftEvaluations SET {', '.join(update_fields)} WHERE evaluation_id = %s"
        values.append(evaluation_id)

        cursor.execute(query, values)
        db.get_db().commit()

        return make_response(jsonify({
            "message": "Draft evaluation updated successfully",
            "evaluation_id": evaluation_id,
            "updated_fields": list(eval_data.keys())
        }), 200)

    except Exception as e:
        current_app.logger.error(f'Error updating draft evaluation: {e}')
        db.get_db().rollback()
        return make_response(jsonify({"error": "Failed to update draft evaluation"}), 500)
+ + User Stories: [Andre-4.5] (Evaluation management) + """ + try: + current_app.logger.info(f'DELETE /strategy/draft-evaluations/{evaluation_id} - Removing evaluation') + + cursor = db.get_db().cursor() + + # Verify evaluation exists + cursor.execute('SELECT evaluation_id FROM DraftEvaluations WHERE evaluation_id = %s', (evaluation_id,)) + if not cursor.fetchone(): + return make_response(jsonify({"error": "Evaluation not found"}), 404) + + # Delete the evaluation + cursor.execute('DELETE FROM DraftEvaluations WHERE evaluation_id = %s', (evaluation_id,)) + db.get_db().commit() + + return make_response(jsonify({ + "message": "Draft evaluation deleted successfully", + "evaluation_id": evaluation_id + }), 200) + + except Exception as e: + current_app.logger.error(f'Error deleting draft evaluation: {e}') + db.get_db().rollback() + return make_response(jsonify({"error": "Failed to delete draft evaluation"}), 500) + + +@strategy.route('/contract-analysis', methods=['GET']) +def get_contract_analysis(): + """ + Get contract efficiency analysis for roster management. 
+ + Query Parameters: + team_id: Optional team filter + position: Optional position filter + min_salary: Minimum salary threshold + + User Stories: [Andre-4.6] (Contract efficiency metrics) + """ + try: + current_app.logger.info('GET /strategy/contract-analysis - Analyzing contract efficiency') + + team_id = request.args.get('team_id', type=int) + position = request.args.get('position') + min_salary = request.args.get('min_salary', type=float) + + cursor = db.get_db().cursor() + + # Get contract efficiency metrics + query = ''' + SELECT + p.player_id, + p.first_name, + p.last_name, + p.position, + p.current_salary, + p.expected_salary, + t.name AS current_team, + COUNT(pgs.game_id) AS games_played, + ROUND(AVG(pgs.points + pgs.rebounds + pgs.assists), 1) AS total_production, + ROUND(AVG(pgs.points + pgs.rebounds + pgs.assists) / (p.current_salary / 1000000), 2) AS production_per_million, + CASE + WHEN p.current_salary > p.expected_salary * 1.15 THEN 'Overpaid' + WHEN p.current_salary < p.expected_salary * 0.85 THEN 'Bargain' + ELSE 'Fair Value' + END AS contract_assessment, + ROUND(((p.expected_salary - p.current_salary) / p.current_salary) * 100, 1) AS value_percentage + FROM Players p + LEFT JOIN TeamsPlayers tp ON p.player_id = tp.player_id AND tp.left_date IS NULL + LEFT JOIN Teams t ON tp.team_id = t.team_id + LEFT JOIN PlayerGameStats pgs ON p.player_id = pgs.player_id + WHERE p.current_salary > 0 + ''' + + params = [] + + if team_id: + query += ' AND t.team_id = %s' + params.append(team_id) + if position: + query += ' AND p.position = %s' + params.append(position) + if min_salary: + query += ' AND p.current_salary >= %s' + params.append(min_salary) + + query += ''' + GROUP BY p.player_id, p.first_name, p.last_name, p.position, + p.current_salary, p.expected_salary, t.name + HAVING games_played >= 5 + ORDER BY production_per_million DESC + ''' + + cursor.execute(query, params) + contract_analysis = cursor.fetchall() + + response_data = { + 
'contract_analysis': contract_analysis, + 'total_analyzed': len(contract_analysis), + 'filters': { + 'team_id': team_id, + 'position': position, + 'min_salary': min_salary + } + } + + return make_response(jsonify(response_data), 200) + + except Exception as e: + current_app.logger.error(f'Error analyzing contracts: {e}') + return make_response(jsonify({"error": "Failed to analyze contracts"}), 500) \ No newline at end of file diff --git a/app/src/Home.py b/app/src/Home.py index ef0f7b19ad..6e54ade3bc 100644 --- a/app/src/Home.py +++ b/app/src/Home.py @@ -34,44 +34,64 @@ # set the title of the page and provide a simple prompt. logger.info("Loading the Home page of the app") -st.title('CS 3200 Sample Semester Project App') +st.title('BallWatch Basketball Analytics Application') st.write('\n\n') -st.write('### HI! As which user would you like to log in?') +st.write('### Select a user to log in:') -# For each of the user personas for which we are implementing -# functionality, we put a button on the screen that the user -# can click to MIMIC logging in as that mock user. - -if st.button("Act as John, a Political Strategy Advisor", - type = 'primary', - use_container_width=True): - # when user clicks the button, they are now considered authenticated - st.session_state['authenticated'] = True - # we set the role of the current user - st.session_state['role'] = 'pol_strat_advisor' - # we add the first name of the user (so it can be displayed on - # subsequent pages). 
# Persona registry: each entry maps a display label to the persona's role,
# first name, landing page, and a short bio shown before login.
user_options = {
    "Johnny Evans - The Superfan": {
        "role": "superfan",
        "first_name": "Johnny",
        "page": "pages/10_Superfan_Home.py",  # TODO: confirm page path
        "bio": "Johnny Evans (25M) is an avid basketball fan who stays up to date with all his favorite players and teams. He finds typical basketball media sources too surface-level and appreciates an analytical approach to the game. On top of this, he likes to do some sports betting in his free time, and is always looking to find an edge."
    },
    "Mike Lewis - Data Engineer": {
        "role": "data_engineer",
        "first_name": "Mike",
        "page": "pages/20_Data_Engineer_Home.py",
        "bio": "Mike has a B.S. in Computer Science and 7 years of experience as a data engineer, specializing in real-time data pipelines and sports analytics. He's passionate about basketball and joined BallWatch to help elevate the game through better backend systems. Mike's primary responsibility is to ensure that BallWatch's data infrastructure stays reliable, accurate, and scalable. He manages ingestion from live APIs, updates datasets post-game, and occasionally runs manual queries for analysts or coaches. He also supports feature development by designing new tables or optimizing old ones."
    },
    "Marcus Thompson - Head Coach": {
        "role": "head_coach",
        "first_name": "Marcus",
        "page": "pages/30_Head_Coach_Home.py",  # TODO: confirm page path
        "bio": "Marcus Thompson is the new head coach of the Nets, and together with the new GM, he wants to bring analytical basketball to Brooklyn. During his previous head coaching stints, he was often bogged down by dense spreadsheets and large PDF reports that were difficult to digest in between games. To coach effectively, he needs actionable insights and clear recommendations that help him plan strategies and make adjustments on the fly. This approach allows him to communicate confidently with his players while making decisions he knows are backed by solid statistics."
    },
    "Andre Wu - General Manager": {
        "role": "general_manager",
        "first_name": "Andre",
        "page": "pages/40_General_Manager_Home.py",  # TODO: confirm page path
        "bio": "Andre Wu is the new general manager for the Brooklyn Nets. Historically plagued with losing seasons, Brooklyn is tired of losing and has high expectations for Andre Wu in his first season as the organization's general manager. Andre Wu plans to rely heavily on advanced analytics and statistics to help rebuild the Nets organization."
    }
}

# Persona picker — defaults to the first entry
selected_user = st.selectbox(
    "Choose a user:",
    options=list(user_options.keys()),
    index=0
)

# Show the selected persona's bio between horizontal rules
if selected_user:
    profile = user_options[selected_user]
    st.write("---")
    st.write(f"**About {profile['first_name']}:**")
    st.write(profile['bio'])
    st.write("---")

# Mock login: stamp session state with the persona and jump to its home page
if st.button("Login", type='primary', use_container_width=True):
    if selected_user:
        profile = user_options[selected_user]

        st.session_state['authenticated'] = True
        st.session_state['role'] = profile['role']
        st.session_state['first_name'] = profile['first_name']

        logger.info(f"Logging in as {selected_user}")

        st.switch_page(profile['page'])
- -TODO: Refactor assets folder into static folder per https://docs.streamlit.io/develop/concepts/configuration/serving-static-files \ No newline at end of file diff --git a/app/src/assets/dishant.jpeg b/app/src/assets/dishant.jpeg new file mode 100644 index 0000000000..a893a1ecf7 Binary files /dev/null and b/app/src/assets/dishant.jpeg differ diff --git a/app/src/assets/drew.jpg b/app/src/assets/drew.jpg new file mode 100644 index 0000000000..b8f7c240d5 Binary files /dev/null and b/app/src/assets/drew.jpg differ diff --git a/app/src/assets/frank.jpg b/app/src/assets/frank.jpg new file mode 100644 index 0000000000..e662a31ad6 Binary files /dev/null and b/app/src/assets/frank.jpg differ diff --git a/app/src/assets/logo.png b/app/src/assets/logo.png index f3f01f835b..5b2e927774 100644 Binary files a/app/src/assets/logo.png and b/app/src/assets/logo.png differ diff --git a/app/src/assets/vince.jpeg b/app/src/assets/vince.jpeg new file mode 100644 index 0000000000..ce6fc7f38c Binary files /dev/null and b/app/src/assets/vince.jpeg differ diff --git a/app/src/assets/wes.jpeg b/app/src/assets/wes.jpeg new file mode 100644 index 0000000000..fe18107c27 Binary files /dev/null and b/app/src/assets/wes.jpeg differ diff --git a/app/src/modules/nav.py b/app/src/modules/nav.py index cb31d3bf67..5e99dc3540 100644 --- a/app/src/modules/nav.py +++ b/app/src/modules/nav.py @@ -11,7 +11,7 @@ def HomeNav(): def AboutPageNav(): - st.sidebar.page_link("pages/30_About.py", label="About", icon="🧠") + st.sidebar.page_link("pages/99_About.py", label="About", icon="🧠") #### ------------------------ Examples for Role of pol_strat_advisor ------------------------ diff --git a/app/src/output.csv b/app/src/output.csv deleted file mode 100644 index 8923c1ed04..0000000000 --- a/app/src/output.csv +++ /dev/null @@ -1,701 +0,0 @@ -Player,Pos,Age,Tm,G,GS,MP,FG,FGA,FG%,3P,3PA,3P%,2P,2PA,2P%,eFG%,FT,FTA,FT%,ORB,DRB,TRB,AST,STL,BLK,TOV,PF,PTS -Álex 
Abrines,SG,25,OKC,31,2,19.0,1.8,5.1,.357,1.3,4.1,.323,0.5,1.0,.500,.487,0.4,0.4,.923,0.2,1.4,1.5,0.6,0.5,0.2,0.5,1.7,5.3 -Quincy Acy,PF,28,PHO,10,0,12.3,0.4,1.8,.222,0.2,1.5,.133,0.2,0.3,.667,.278,0.7,1.0,.700,0.3,2.2,2.5,0.8,0.1,0.4,0.4,2.4,1.7 -Jaylen Adams,PG,22,ATL,34,1,12.6,1.1,3.2,.345,0.7,2.2,.338,0.4,1.1,.361,.459,0.2,0.3,.778,0.3,1.4,1.8,1.9,0.4,0.1,0.8,1.3,3.2 -Steven Adams,C,25,OKC,80,80,33.4,6.0,10.1,.595,0.0,0.0,.000,6.0,10.1,.596,.595,1.8,3.7,.500,4.9,4.6,9.5,1.6,1.5,1.0,1.7,2.6,13.9 -Bam Adebayo,C,21,MIA,82,28,23.3,3.4,5.9,.576,0.0,0.2,.200,3.4,5.7,.588,.579,2.0,2.8,.735,2.0,5.3,7.3,2.2,0.9,0.8,1.5,2.5,8.9 -Deng Adel,SF,21,CLE,19,3,10.2,0.6,1.9,.306,0.3,1.2,.261,0.3,0.7,.385,.389,0.2,0.2,1.000,0.2,0.8,1.0,0.3,0.1,0.2,0.3,0.7,1.7 -DeVaughn Akoon-Purcell,SG,25,DEN,7,0,3.1,0.4,1.4,.300,0.0,0.6,.000,0.4,0.9,.500,.300,0.1,0.3,.500,0.1,0.4,0.6,0.9,0.3,0.0,0.3,0.6,1.0 -LaMarcus Aldridge,C,33,SAS,81,81,33.2,8.4,16.3,.519,0.1,0.5,.238,8.3,15.8,.528,.522,4.3,5.1,.847,3.1,6.1,9.2,2.4,0.5,1.3,1.8,2.2,21.3 -Rawle Alkins,SG,21,CHI,10,1,12.0,1.3,3.9,.333,0.3,1.2,.250,1.0,2.7,.370,.372,0.8,1.2,.667,1.1,1.5,2.6,1.3,0.1,0.0,0.8,0.7,3.7 -Grayson Allen,SG,23,UTA,38,2,10.9,1.8,4.7,.376,0.8,2.6,.323,0.9,2.1,.443,.466,1.2,1.6,.750,0.1,0.5,0.6,0.7,0.2,0.2,0.9,1.2,5.6 -Jarrett Allen,C,20,BRK,80,80,26.2,4.2,7.1,.590,0.1,0.6,.133,4.1,6.5,.629,.595,2.5,3.5,.709,2.4,6.0,8.4,1.4,0.5,1.5,1.3,2.3,10.9 -Kadeem Allen,PG,26,NYK,19,1,21.9,3.4,7.4,.461,0.9,1.9,.472,2.5,5.5,.457,.521,2.2,2.8,.778,0.4,2.3,2.7,4.0,0.8,0.2,1.4,2.4,9.9 -Al-Farouq Aminu,PF,28,POR,81,81,28.3,3.2,7.3,.433,1.2,3.5,.343,2.0,3.9,.514,.514,1.9,2.1,.867,1.4,6.1,7.5,1.3,0.8,0.4,0.9,1.8,9.4 -Justin Anderson,SF,25,ATL,48,4,9.6,1.3,3.3,.408,0.5,1.6,.312,0.8,1.7,.500,.484,0.5,0.7,.743,0.5,1.3,1.8,0.5,0.5,0.3,0.5,1.0,3.7 -Kyle Anderson,SF,25,MEM,43,40,29.8,3.5,6.4,.543,0.2,0.8,.265,3.3,5.6,.583,.560,0.9,1.5,.578,1.1,4.7,5.8,3.0,1.3,0.9,1.3,2.6,8.0 -Ryan 
Anderson,PF,30,TOT,25,8,12.9,0.8,2.8,.304,0.4,1.6,.225,0.5,1.2,.414,.370,0.5,0.6,.750,0.7,1.4,2.2,0.8,0.2,0.0,0.6,1.0,2.5 -Ryan Anderson,PF,30,PHO,15,8,18.5,1.3,4.0,.317,0.5,2.3,.206,0.8,1.7,.462,.375,0.7,0.9,.786,0.9,2.1,3.0,1.1,0.2,0.1,0.8,1.5,3.7 -Ryan Anderson,PF,30,MIA,10,0,4.4,0.2,0.9,.222,0.2,0.6,.333,0.0,0.3,.000,.333,0.1,0.2,.500,0.4,0.5,0.9,0.2,0.1,0.0,0.2,0.2,0.7 -Ike Anigbogu,C,20,IND,3,0,2.0,0.0,1.0,.000,0.0,0.0,0,0.0,1.0,.000,.000,0.0,0.0,0,0.3,0.7,1.0,0.3,0.0,0.3,0.3,0.0,0.0 -Giannis Antetokounmpo,PF,24,MIL,72,72,32.8,10.0,17.3,.578,0.7,2.8,.256,9.3,14.5,.641,.599,6.9,9.5,.729,2.2,10.3,12.5,5.9,1.3,1.5,3.7,3.2,27.7 -Kostas Antetokounmpo,PF,21,DAL,2,0,5.5,0.0,1.5,.000,0.0,0.0,0,0.0,1.5,.000,.000,1.0,2.0,.500,0.0,0.5,0.5,0.0,1.0,0.0,0.5,0.0,1.0 -Carmelo Anthony,PF,34,HOU,10,2,29.4,4.9,12.1,.405,2.1,6.4,.328,2.8,5.7,.491,.492,1.5,2.2,.682,0.9,4.5,5.4,0.5,0.4,0.7,0.8,3.2,13.4 -OG Anunoby,SF,21,TOR,67,6,20.2,2.7,6.0,.453,1.0,3.0,.332,1.7,3.0,.574,.536,0.5,0.9,.581,0.9,2.1,2.9,0.7,0.7,0.3,0.8,2.1,7.0 -Ryan Arcidiacono,PG,24,CHI,81,32,24.2,2.3,5.2,.447,1.0,2.7,.373,1.3,2.5,.527,.544,1.1,1.3,.873,0.3,2.4,2.7,3.3,0.8,0.0,0.8,2.1,6.7 -Trevor Ariza,SF,33,TOT,69,69,34.0,4.3,10.7,.399,2.1,6.3,.334,2.2,4.4,.493,.498,1.9,2.4,.793,0.7,4.7,5.4,3.7,1.3,0.3,1.5,1.9,12.5 -Trevor Ariza,SF,33,PHO,26,26,34.0,3.3,8.7,.379,1.9,5.3,.360,1.4,3.4,.409,.489,1.4,1.7,.837,0.6,5.0,5.6,3.3,1.5,0.3,1.5,1.7,9.9 -Trevor Ariza,SF,33,WAS,43,43,34.1,4.8,11.8,.409,2.2,6.9,.322,2.6,5.0,.528,.502,2.2,2.8,.777,0.8,4.5,5.3,3.8,1.2,0.3,1.6,2.0,14.1 -D.J. 
Augustin,PG,31,ORL,81,81,28.0,3.9,8.4,.470,1.6,3.8,.421,2.3,4.5,.511,.566,2.2,2.6,.866,0.5,2.0,2.5,5.3,0.6,0.0,1.6,1.4,11.7 -Deandre Ayton,C,20,PHO,71,70,30.7,7.2,12.3,.585,0.0,0.1,.000,7.2,12.2,.588,.585,2.0,2.7,.746,3.1,7.1,10.3,1.8,0.9,0.9,1.8,2.9,16.3 -Dwayne Bacon,SG,23,CHO,43,13,17.7,2.8,6.0,.475,0.9,2.0,.437,2.0,4.0,.494,.549,0.8,1.1,.739,0.2,1.9,2.1,1.1,0.3,0.1,0.4,1.7,7.3 -Marvin Bagley III,PF,19,SAC,62,4,25.3,5.7,11.4,.504,0.5,1.5,.313,5.3,9.8,.534,.525,2.9,4.2,.691,2.6,5.0,7.6,1.0,0.5,1.0,1.6,1.9,14.9 -Ron Baker,SG,25,TOT,15,0,10.1,0.3,1.3,.200,0.1,0.9,.077,0.2,0.5,.429,.225,0.3,0.4,.833,0.1,0.7,0.7,1.0,0.4,0.1,0.3,1.2,0.9 -Ron Baker,SG,25,NYK,11,0,9.7,0.4,1.5,.250,0.1,0.8,.111,0.3,0.6,.429,.281,0.5,0.5,.833,0.1,0.5,0.6,1.2,0.5,0.0,0.3,1.5,1.3 -Ron Baker,SG,25,WAS,4,0,11.3,0.0,1.0,.000,0.0,1.0,.000,0.0,0.0,0,.000,0.0,0.0,0,0.0,1.0,1.0,0.5,0.3,0.3,0.5,0.5,0.0 -Wade Baldwin,PG,22,POR,16,0,5.9,0.6,2.1,.303,0.1,0.6,.222,0.5,1.5,.333,.333,0.5,0.7,.727,0.1,0.8,0.9,0.8,0.1,0.1,0.9,0.7,1.9 -Lonzo Ball,PG,21,LAL,47,45,30.3,3.9,9.7,.406,1.6,4.9,.329,2.3,4.9,.482,.488,0.4,1.0,.417,1.1,4.2,5.3,5.4,1.5,0.4,2.2,2.4,9.9 -Mo Bamba,C,20,ORL,47,1,16.3,2.5,5.2,.481,0.4,1.5,.300,2.0,3.7,.555,.525,0.8,1.3,.587,1.4,3.6,5.0,0.8,0.3,1.4,0.9,2.2,6.2 -J.J. 
Barea,PG,34,DAL,38,0,19.8,4.2,10.1,.418,1.0,3.4,.297,3.2,6.8,.479,.468,1.4,2.1,.705,0.3,2.2,2.5,5.6,0.6,0.0,1.9,1.3,10.9 -Harrison Barnes,PF,26,DAL,49,49,32.3,5.9,14.6,.404,2.5,6.3,.389,3.4,8.3,.416,.489,3.4,4.0,.833,0.7,3.5,4.2,1.3,0.7,0.2,1.4,1.6,17.7 -Harrison Barnes,SF,26,SAC,28,28,33.9,5.0,11.1,.455,1.9,4.6,.408,3.1,6.4,.489,.540,2.3,2.9,.800,0.8,4.8,5.5,1.9,0.6,0.1,1.1,1.5,14.3 -Will Barton,SF,28,DEN,43,38,27.7,4.3,10.7,.402,1.6,4.6,.342,2.7,6.1,.447,.475,1.3,1.7,.770,0.7,3.9,4.6,2.9,0.4,0.5,1.5,1.9,11.5 -Keita Bates-Diop,SF,23,MIN,30,3,16.8,2.0,4.7,.423,0.4,1.7,.250,1.6,3.0,.522,.468,0.6,0.9,.643,0.5,2.2,2.8,0.6,0.6,0.5,0.5,1.0,5.0 -Nicolas Batum,SF,30,CHO,75,72,31.4,3.4,7.5,.450,1.5,4.0,.389,1.8,3.5,.519,.553,1.0,1.2,.865,0.9,4.3,5.2,3.3,0.9,0.6,1.6,1.9,9.3 -Jerryd Bayless,PG,30,MIN,34,6,19.3,2.4,6.8,.357,0.9,2.9,.296,1.6,3.9,.402,.420,0.5,0.8,.571,0.3,1.5,1.8,3.5,0.5,0.1,0.9,1.6,6.1 -Aron Baynes,C,32,BOS,51,18,16.1,2.1,4.4,.471,0.4,1.2,.344,1.6,3.2,.519,.518,1.0,1.2,.855,1.7,3.0,4.7,1.1,0.2,0.7,0.8,2.5,5.6 -Kent Bazemore,SG,29,ATL,67,35,24.5,4.1,10.3,.402,1.4,4.5,.320,2.7,5.8,.465,.472,1.9,2.6,.726,0.6,3.3,3.9,2.3,1.3,0.6,1.8,2.5,11.6 -Bradley Beal,SG,25,WAS,82,82,36.9,9.3,19.6,.475,2.5,7.3,.351,6.8,12.4,.548,.540,4.4,5.5,.808,1.1,3.9,5.0,5.5,1.5,0.7,2.7,2.8,25.6 -Malik Beasley,SG,22,DEN,81,18,23.2,4.3,9.1,.474,2.0,5.0,.402,2.3,4.1,.560,.584,0.7,0.8,.848,0.4,2.0,2.5,1.2,0.7,0.1,0.7,1.4,11.3 -Michael Beasley,PF,30,LAL,26,2,10.7,2.9,5.9,.490,0.1,0.7,.176,2.8,5.2,.529,.500,1.1,1.5,.718,0.5,1.8,2.3,1.0,0.3,0.4,1.0,1.6,7.0 -Marco Belinelli,SG,32,SAS,79,1,23.0,3.6,8.7,.413,1.9,5.0,.372,1.7,3.7,.468,.520,1.4,1.6,.903,0.2,2.3,2.5,1.7,0.4,0.1,0.9,1.5,10.5 -Jordan Bell,C,24,GSW,68,3,11.6,1.5,2.8,.516,0.0,0.0,.000,1.5,2.8,.521,.516,0.4,0.6,.610,0.8,1.9,2.7,1.1,0.3,0.8,0.6,1.2,3.3 -DeAndre' Bembry,SG,24,ATL,82,15,23.5,3.4,7.5,.446,0.6,2.1,.289,2.7,5.4,.508,.487,1.1,1.7,.640,0.7,3.7,4.4,2.5,1.3,0.5,1.7,2.3,8.4 -Dragan 
Bender,PF,21,PHO,46,27,18.0,1.9,4.3,.447,0.5,2.2,.218,1.4,2.1,.688,.503,0.7,1.2,.593,0.7,3.2,4.0,1.2,0.4,0.5,0.8,2.0,5.0 -Dairis Bertāns,SG,29,NOP,12,0,13.9,1.0,3.9,.255,0.8,2.8,.294,0.2,1.1,.154,.362,0.0,0.0,0,0.2,0.6,0.8,0.8,0.1,0.0,0.2,0.6,2.8 -Dāvis Bertāns,PF,26,SAS,76,12,21.5,2.7,6.0,.450,1.9,4.4,.429,0.8,1.5,.513,.610,0.7,0.8,.883,0.3,3.2,3.5,1.3,0.5,0.4,0.6,1.8,8.0 -Patrick Beverley,PG,30,LAC,78,49,27.4,2.5,6.1,.407,1.4,3.6,.397,1.1,2.5,.421,.524,1.2,1.6,.780,1.0,4.0,5.0,3.8,0.9,0.6,1.1,3.4,7.6 -Khem Birch,C,26,ORL,50,1,12.9,1.8,3.0,.603,0.0,0.0,.000,1.8,3.0,.607,.603,1.2,1.7,.699,1.6,2.2,3.8,0.8,0.4,0.6,0.4,1.4,4.8 -Bismack Biyombo,C,26,CHO,54,32,14.5,1.6,2.9,.571,0.0,0.0,0,1.6,2.9,.571,.571,1.1,1.7,.637,1.5,3.1,4.6,0.6,0.2,0.8,0.6,1.9,4.4 -Nemanja Bjelica,PF,30,SAC,77,70,23.2,3.7,7.7,.479,1.3,3.3,.401,2.4,4.4,.539,.566,0.9,1.2,.761,1.6,4.1,5.8,1.9,0.7,0.7,1.1,2.6,9.6 -Antonio Blakeney,SG,22,CHI,57,3,14.5,2.9,6.9,.419,0.6,1.6,.396,2.3,5.4,.426,.465,0.9,1.3,.658,0.1,1.7,1.9,0.7,0.2,0.2,0.6,0.7,7.3 -Eric Bledsoe,PG,29,MIL,78,78,29.1,6.0,12.4,.484,1.6,4.8,.329,4.4,7.6,.582,.548,2.3,3.0,.750,1.1,3.6,4.6,5.5,1.5,0.4,2.1,2.0,15.9 -Jaron Blossomgame,SF,25,CLE,27,4,16.3,1.7,3.9,.443,0.4,1.4,.256,1.4,2.5,.552,.491,0.4,0.5,.769,1.0,2.7,3.6,0.5,0.3,0.3,0.4,0.7,4.2 -Bogdan Bogdanović,SG,26,SAC,70,17,27.8,5.2,12.3,.418,1.9,5.3,.360,3.2,7.0,.462,.496,1.9,2.3,.827,0.6,2.9,3.5,3.8,1.0,0.2,1.7,2.0,14.1 -Bojan Bogdanović,SF,29,IND,81,81,31.8,6.4,13.0,.497,2.0,4.8,.425,4.4,8.2,.538,.575,3.0,3.8,.807,0.4,3.7,4.1,2.0,0.9,0.0,1.7,1.7,18.0 -Andrew Bogut,C,34,GSW,11,5,12.2,1.6,3.3,.500,0.0,0.0,0,1.6,3.3,.500,.500,0.3,0.3,1.000,1.1,3.9,5.0,1.0,0.3,0.7,0.7,2.0,3.5 -Jonah Bolden,PF,23,PHI,44,10,14.5,1.8,3.7,.494,0.8,2.2,.354,1.0,1.5,.697,.599,0.3,0.6,.481,1.1,2.7,3.8,0.9,0.4,0.9,0.8,2.3,4.7 -Isaac Bonga,PG,19,LAL,22,0,5.5,0.2,1.5,.152,0.0,0.4,.000,0.2,1.1,.200,.152,0.4,0.7,.600,0.4,0.7,1.1,0.7,0.4,0.2,0.3,0.4,0.9 -Devin 
Booker,SG,22,PHO,64,64,35.0,9.2,19.6,.467,2.1,6.5,.326,7.0,13.1,.536,.521,6.1,7.1,.866,0.6,3.5,4.1,6.8,0.9,0.2,4.1,3.1,26.6 -Chris Boucher,PF,26,TOR,28,0,5.8,1.2,2.7,.447,0.4,1.3,.324,0.8,1.4,.564,.526,0.5,0.5,.867,0.6,1.4,2.0,0.1,0.2,0.9,0.3,1.1,3.3 -Avery Bradley,SG,28,TOT,63,63,30.2,3.9,9.7,.408,1.4,3.9,.351,2.6,5.8,.446,.479,0.7,0.8,.860,0.7,2.1,2.8,2.4,0.7,0.3,1.4,2.7,9.9 -Avery Bradley,SG,28,LAC,49,49,29.9,3.3,8.6,.383,1.2,3.5,.337,2.1,5.1,.415,.452,0.4,0.5,.800,0.7,2.0,2.7,2.0,0.6,0.3,1.2,2.7,8.2 -Avery Bradley,SG,28,MEM,14,14,31.6,6.2,13.4,.463,2.0,5.2,.384,4.2,8.2,.513,.537,1.6,1.8,.920,0.6,2.6,3.1,4.0,1.0,0.0,2.0,2.4,16.1 -Tony Bradley,C,21,UTA,3,0,12.0,2.7,5.3,.500,0.0,0.0,0,2.7,5.3,.500,.500,0.3,0.7,.500,3.0,2.0,5.0,0.3,0.7,0.7,1.0,2.0,5.7 -Corey Brewer,SF,32,TOT,31,3,15.9,1.7,4.0,.431,0.5,1.4,.318,1.3,2.5,.494,.488,1.0,1.4,.721,0.8,1.6,2.5,1.3,1.0,0.2,0.6,2.0,4.9 -Corey Brewer,SF,32,PHI,7,3,20.0,2.9,7.0,.408,0.6,2.0,.286,2.3,5.0,.457,.449,1.3,1.9,.692,0.6,1.9,2.4,1.4,1.7,0.3,1.1,2.3,7.6 -Corey Brewer,SF,32,SAC,24,0,14.7,1.4,3.1,.446,0.4,1.3,.333,1.0,1.8,.523,.514,0.9,1.3,.733,0.9,1.5,2.5,1.2,0.8,0.2,0.5,2.0,4.1 -Mikal Bridges,SF,22,PHO,82,56,29.5,3.0,6.9,.430,1.3,3.8,.335,1.7,3.0,.548,.523,1.2,1.4,.805,0.7,2.5,3.2,2.1,1.6,0.5,0.9,2.5,8.3 -Miles Bridges,SF,20,CHO,80,25,21.2,3.0,6.4,.464,0.8,2.5,.325,2.2,3.9,.553,.527,0.7,1.0,.753,0.8,3.2,4.0,1.2,0.7,0.6,0.6,1.4,7.5 -Isaiah Briscoe,PG,22,ORL,39,0,14.3,1.4,3.5,.399,0.3,0.9,.324,1.1,2.7,.423,.438,0.4,0.7,.577,0.1,1.8,1.9,2.2,0.3,0.1,0.8,1.7,3.5 -Ryan Broekhoff,SG,28,DAL,42,0,10.8,1.4,3.0,.452,0.9,2.2,.409,0.5,0.8,.576,.603,0.4,0.5,.789,0.2,1.3,1.5,0.5,0.1,0.1,0.4,0.8,4.0 -Malcolm Brogdon,SG,26,MIL,64,64,28.6,5.9,11.7,.505,1.6,3.8,.426,4.3,7.9,.544,.575,2.2,2.4,.928,1.0,3.5,4.5,3.2,0.7,0.2,1.4,1.6,15.6 -Dillon Brooks,SF,23,MEM,18,0,18.3,2.7,6.8,.402,0.8,2.2,.375,1.9,4.6,.415,.463,1.2,1.7,.733,0.5,1.2,1.7,0.9,0.6,0.2,1.1,2.8,7.5 -MarShon 
Brooks,SG,30,MEM,29,0,13.3,2.6,5.8,.450,0.5,1.9,.278,2.1,4.0,.530,.494,0.8,1.1,.697,0.4,1.1,1.6,0.9,0.3,0.1,0.7,1.1,6.6 -Bruce Brown,SG,22,DET,74,56,19.6,1.7,4.2,.398,0.3,1.3,.258,1.4,3.0,.457,.436,0.6,0.8,.750,0.6,1.9,2.5,1.2,0.5,0.5,0.6,2.4,4.3 -Jaylen Brown,SG,22,BOS,74,25,25.9,5.0,10.7,.465,1.3,3.7,.344,3.7,7.0,.529,.525,1.8,2.7,.658,0.9,3.4,4.2,1.4,0.9,0.4,1.3,2.5,13.0 -Lorenzo Brown,PG,28,TOR,26,0,8.2,0.9,2.7,.324,0.2,1.1,.214,0.7,1.7,.395,.366,0.1,0.1,1.000,0.2,1.0,1.2,1.1,0.5,0.2,0.6,0.8,2.1 -Sterling Brown,SG,23,MIL,58,7,17.8,2.5,5.4,.465,0.9,2.5,.361,1.6,2.8,.558,.550,0.5,0.7,.690,0.5,2.7,3.2,1.4,0.4,0.1,0.8,1.5,6.4 -Troy Brown Jr.,SF,19,WAS,52,10,14.0,1.9,4.5,.415,0.4,1.3,.319,1.4,3.2,.455,.462,0.6,0.9,.681,0.7,2.1,2.8,1.5,0.4,0.1,0.6,1.1,4.8 -Jalen Brunson,PG,22,DAL,73,38,21.8,3.6,7.7,.467,0.9,2.5,.348,2.8,5.3,.523,.523,1.2,1.6,.725,0.3,2.0,2.3,3.2,0.5,0.1,1.2,1.7,9.3 -Thomas Bryant,C,21,WAS,72,53,20.8,4.3,7.0,.616,0.5,1.4,.333,3.8,5.6,.685,.648,1.5,1.9,.781,1.6,4.7,6.3,1.3,0.3,0.9,0.8,1.8,10.5 -Reggie Bullock,SG,27,TOT,63,60,29.8,3.9,9.4,.412,2.3,6.2,.377,1.5,3.2,.483,.537,1.2,1.3,.859,0.3,2.4,2.7,2.0,0.6,0.2,1.0,1.7,11.3 -Reggie Bullock,SG,27,DET,44,44,30.8,4.1,10.0,.413,2.6,6.7,.388,1.5,3.3,.463,.542,1.3,1.5,.875,0.5,2.3,2.8,2.5,0.5,0.1,1.2,1.8,12.1 -Reggie Bullock,SG,27,LAL,19,16,27.6,3.3,8.1,.412,1.8,5.2,.343,1.5,2.8,.537,.523,0.9,1.1,.810,0.1,2.5,2.6,1.1,0.8,0.4,0.6,1.6,9.3 -Trey Burke,PG,26,TOT,58,8,19.4,4.1,9.4,.431,1.0,2.7,.352,3.1,6.7,.463,.482,1.8,2.1,.831,0.5,1.2,1.7,2.7,0.6,0.1,0.8,1.0,10.9 -Trey Burke,PG,26,NYK,33,7,20.9,4.5,10.8,.413,0.9,2.6,.349,3.6,8.2,.434,.455,1.9,2.3,.827,0.5,1.4,1.9,2.8,0.6,0.2,0.9,1.0,11.8 -Trey Burke,PG,26,DAL,25,1,17.4,3.5,7.6,.463,1.0,2.9,.356,2.5,4.7,.530,.532,1.6,2.0,.837,0.5,1.0,1.5,2.6,0.5,0.1,0.8,1.0,9.7 -Alec Burks,SG,27,TOT,64,24,21.5,3.0,7.4,.405,1.0,2.6,.363,2.0,4.8,.428,.469,1.8,2.2,.823,0.5,3.2,3.7,2.0,0.6,0.3,1.0,1.4,8.8 -Alec 
Burks,SG,27,UTA,17,0,15.8,2.8,6.7,.412,0.9,2.5,.372,1.8,4.2,.437,.482,1.9,2.2,.868,0.1,1.5,1.6,1.2,0.4,0.2,0.9,1.3,8.4 -Alec Burks,SG,27,CLE,34,24,28.8,4.0,10.0,.400,1.3,3.5,.378,2.7,6.5,.412,.466,2.3,2.9,.806,0.8,4.7,5.5,2.9,0.7,0.5,1.4,1.8,11.6 -Alec Burks,SG,27,SAC,13,0,9.8,0.7,1.5,.450,0.0,0.5,.000,0.7,1.1,.643,.450,0.3,0.4,.800,0.2,1.5,1.7,0.8,0.6,0.1,0.3,0.5,1.7 -Deonte Burton,SG,25,OKC,32,0,7.5,1.0,2.6,.402,0.3,0.8,.296,0.8,1.7,.455,.451,0.3,0.4,.667,0.1,0.8,0.9,0.3,0.2,0.3,0.3,1.0,2.6 -Jimmy Butler,SG,29,MIN,10,10,36.1,7.4,15.7,.471,1.7,4.5,.378,5.7,11.2,.509,.525,4.8,6.1,.787,1.6,3.6,5.2,4.3,2.4,1.0,1.4,1.8,21.3 -Jimmy Butler,SF,29,PHI,55,55,33.2,6.3,13.6,.461,0.9,2.7,.338,5.3,10.9,.491,.494,4.8,5.5,.868,1.9,3.4,5.3,4.0,1.8,0.5,1.5,1.7,18.2 -Bruno Caboclo,PF,23,MEM,34,19,23.5,2.8,6.6,.427,1.4,3.8,.369,1.4,2.8,.505,.533,1.2,1.5,.840,1.2,3.4,4.6,1.5,0.4,1.0,1.1,2.4,8.3 -José Calderón,PG,37,DET,49,0,12.9,0.9,2.4,.375,0.3,1.2,.246,0.6,1.3,.492,.433,0.2,0.2,.818,0.2,1.0,1.2,2.3,0.3,0.1,0.7,1.3,2.3 -Kentavious Caldwell-Pope,SG,25,LAL,82,23,24.8,4.0,9.2,.430,1.8,5.3,.347,2.1,3.9,.542,.530,1.7,1.9,.867,0.6,2.3,2.9,1.3,0.9,0.2,0.8,1.7,11.4 -Isaiah Canaan,PG,27,TOT,30,16,21.0,2.1,5.5,.390,1.1,3.2,.354,1.0,2.3,.441,.494,0.6,0.8,.792,0.2,1.7,1.9,2.8,0.5,0.1,1.2,1.7,6.0 -Isaiah Canaan,PG,27,PHO,19,15,26.5,2.7,6.8,.395,1.3,3.8,.347,1.4,3.0,.456,.492,0.8,1.1,.750,0.3,2.3,2.6,3.3,0.6,0.0,1.5,2.4,7.5 -Isaiah Canaan,PG,27,MIN,7,1,13.6,1.6,4.1,.379,1.0,2.7,.368,0.6,1.4,.400,.500,0.6,0.6,1.000,0.0,0.7,0.7,2.7,0.3,0.1,0.6,0.6,4.7 -Isaiah Canaan,PG,27,MIL,4,0,7.8,0.5,1.5,.333,0.5,1.3,.400,0.0,0.3,.000,.500,0.0,0.0,0,0.3,0.8,1.0,0.8,0.0,0.3,0.5,0.5,1.5 -Clint Capela,C,24,HOU,67,67,33.6,7.1,10.9,.648,0.0,0.0,0,7.1,10.9,.648,.648,2.5,3.9,.636,4.4,8.2,12.7,1.4,0.7,1.5,1.4,2.5,16.6 -DeMarre Carroll,PF,32,BRK,67,8,25.4,3.4,8.6,.395,1.6,4.6,.342,1.8,4.0,.457,.487,2.7,3.6,.760,1.0,4.2,5.2,1.3,0.5,0.1,1.1,1.7,11.1 -Jevon 
Carter,PG,23,MEM,39,3,14.8,1.4,4.7,.303,0.9,2.6,.333,0.6,2.1,.265,.395,0.7,0.8,.813,0.4,1.3,1.7,1.8,0.7,0.3,0.8,1.4,4.4 -Vince Carter,PF,42,ATL,76,9,17.5,2.6,6.2,.419,1.6,4.2,.389,1.0,2.0,.480,.550,0.6,0.9,.712,0.4,2.1,2.6,1.1,0.6,0.4,0.6,1.9,7.4 -Wendell Carter Jr.,C,19,CHI,44,44,25.2,4.1,8.4,.485,0.1,0.7,.188,4.0,7.7,.513,.493,2.0,2.5,.795,2.0,5.0,7.0,1.8,0.6,1.3,1.5,3.5,10.3 -Michael Carter-Williams,PG,27,TOT,28,1,13.3,1.6,4.4,.374,0.4,1.4,.263,1.3,3.0,.424,.415,1.1,1.9,.604,0.7,1.8,2.5,2.5,0.7,0.5,0.7,1.7,4.8 -Michael Carter-Williams,PG,27,HOU,16,1,9.1,1.6,3.8,.410,0.4,1.2,.368,1.1,2.6,.429,.467,0.8,1.6,.462,0.2,0.6,0.8,1.3,0.6,0.4,0.6,1.7,4.3 -Michael Carter-Williams,PG,27,ORL,12,0,18.9,1.8,5.2,.339,0.3,1.6,.158,1.5,3.6,.419,.363,1.7,2.3,.741,1.3,3.4,4.8,4.1,0.9,0.8,0.8,1.8,5.4 -Alex Caruso,PG,24,LAL,25,4,21.2,3.1,6.9,.445,1.0,2.0,.480,2.1,4.9,.431,.514,2.0,2.6,.797,0.8,1.9,2.7,3.1,1.0,0.4,1.7,2.2,9.2 -Omri Casspi,SF,30,MEM,36,0,14.4,2.4,4.5,.534,0.4,1.2,.349,2.0,3.3,.602,.581,1.1,1.6,.672,0.5,2.7,3.2,0.7,0.6,0.3,0.6,1.0,6.3 -Willie Cauley-Stein,C,25,SAC,81,81,27.3,5.1,9.1,.556,0.0,0.0,.500,5.1,9.1,.556,.557,1.7,3.1,.551,2.2,6.1,8.4,2.4,1.2,0.6,1.0,2.8,11.9 -Troy Caupain,PG,23,ORL,4,0,4.0,1.0,2.0,.500,0.5,0.8,.667,0.5,1.3,.400,.625,0.0,0.0,0,0.3,0.5,0.8,1.0,0.3,0.0,0.0,0.0,2.5 -Tyler Cavanaugh,PF,24,UTA,11,0,3.5,0.3,0.9,.300,0.1,0.5,.200,0.2,0.5,.400,.350,0.2,0.2,1.000,0.3,0.5,0.7,0.1,0.0,0.0,0.1,0.3,0.8 -Tyson Chandler,C,36,TOT,55,6,15.9,1.1,1.8,.616,0.0,0.0,.000,1.1,1.8,.622,.616,0.9,1.6,.586,1.7,3.9,5.6,0.7,0.4,0.4,0.8,2.0,3.1 -Tyson Chandler,C,36,PHO,7,0,12.7,1.1,1.7,.667,0.0,0.0,0,1.1,1.7,.667,.667,1.4,2.6,.556,1.0,4.6,5.6,0.9,0.3,0.1,1.0,3.0,3.7 -Tyson Chandler,C,36,LAL,48,6,16.4,1.1,1.8,.609,0.0,0.0,.000,1.1,1.8,.616,.609,0.9,1.4,.594,1.8,3.8,5.6,0.6,0.4,0.5,0.7,1.9,3.1 -Wilson Chandler,PF,31,PHI,36,32,26.4,2.5,5.8,.440,1.3,3.3,.390,1.3,2.5,.506,.551,0.4,0.5,.722,1.2,3.4,4.7,2.0,0.6,0.5,1.1,2.6,6.7 -Wilson 
Chandler,SF,31,LAC,15,1,15.1,1.5,4.4,.348,0.9,2.7,.325,0.7,1.7,.385,.447,0.3,0.5,.714,0.3,2.9,3.1,0.7,0.2,0.2,0.5,1.9,4.3 -Joe Chealey,PG,23,CHO,1,0,8.0,1.0,3.0,.333,0.0,0.0,0,1.0,3.0,.333,.333,0.0,0.0,0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0 -Chris Chiozza,PG,23,HOU,7,0,4.7,0.3,1.1,.250,0.3,0.7,.400,0.0,0.4,.000,.375,0.0,0.0,0,0.1,0.4,0.6,0.6,0.1,0.1,0.1,0.3,0.9 -Marquese Chriss,PF,21,TOT,43,2,11.6,1.6,4.2,.372,0.4,1.7,.222,1.2,2.5,.472,.417,0.7,1.0,.711,0.9,2.4,3.3,0.5,0.4,0.3,0.8,1.9,4.2 -Marquese Chriss,PF,21,HOU,16,0,6.5,0.7,2.1,.324,0.1,0.9,.067,0.6,1.2,.526,.338,0.4,0.4,.857,0.4,1.3,1.8,0.4,0.1,0.3,0.8,1.1,1.8 -Marquese Chriss,PF,21,CLE,27,2,14.6,2.1,5.4,.384,0.6,2.1,.263,1.5,3.3,.461,.435,1.0,1.4,.684,1.2,3.0,4.2,0.6,0.6,0.3,0.9,2.4,5.7 -Gary Clark,PF,24,HOU,51,2,12.6,1.0,3.0,.331,0.8,2.7,.297,0.2,0.3,.692,.467,0.1,0.1,1.000,0.5,1.8,2.3,0.4,0.4,0.5,0.1,0.9,2.9 -Ian Clark,SG,27,NOP,60,6,16.2,2.5,6.4,.394,1.1,3.4,.327,1.4,3.0,.470,.480,0.6,0.6,.892,0.2,1.3,1.5,1.6,0.4,0.1,1.0,1.6,6.7 -Jordan Clarkson,SG,26,CLE,81,0,27.3,6.5,14.6,.448,1.8,5.5,.324,4.8,9.1,.524,.509,2.0,2.4,.844,1.0,2.3,3.3,2.4,0.7,0.2,1.7,1.4,16.8 -John Collins,PF,21,ATL,61,59,30.0,7.6,13.6,.560,0.9,2.6,.348,6.7,11.0,.609,.593,3.3,4.4,.763,3.6,6.2,9.8,2.0,0.4,0.6,2.0,3.3,19.5 -Zach Collins,C,21,POR,77,0,17.6,2.5,5.2,.473,0.5,1.6,.331,1.9,3.6,.534,.523,1.2,1.6,.746,1.4,2.8,4.2,0.9,0.3,0.9,1.0,2.3,6.6 -Darren Collison,PG,31,IND,76,76,28.2,4.1,8.7,.467,1.0,2.6,.407,3.0,6.1,.492,.527,2.1,2.5,.832,0.5,2.6,3.1,6.0,1.4,0.1,1.6,1.8,11.2 -Bonzie Colson,SF,23,MIL,8,2,12.3,1.6,4.9,.333,0.6,2.6,.238,1.0,2.3,.444,.397,1.0,1.1,.889,1.0,2.8,3.8,0.4,0.6,0.1,0.4,0.4,4.9 -Mike Conley,PG,31,MEM,70,70,33.5,7.0,16.0,.438,2.2,6.1,.364,4.8,9.9,.483,.507,4.9,5.8,.845,0.6,2.8,3.4,6.4,1.3,0.3,1.9,1.8,21.1 -Pat Connaughton,SG,26,MIL,61,2,20.7,2.7,5.7,.466,1.1,3.3,.330,1.6,2.5,.647,.560,0.5,0.7,.725,1.0,3.2,4.2,2.0,0.5,0.4,0.5,1.3,6.9 -Quinn 
Cook,PG,25,GSW,74,10,14.3,2.8,5.9,.465,1.1,2.7,.405,1.7,3.2,.515,.557,0.3,0.4,.769,0.3,1.8,2.1,1.6,0.3,0.0,0.7,1.2,6.9 -DeMarcus Cousins,C,28,GSW,30,30,25.7,5.9,12.4,.480,0.9,3.2,.274,5.1,9.2,.551,.515,3.5,4.8,.736,1.4,6.8,8.2,3.6,1.3,1.5,2.4,3.6,16.3 -Robert Covington,SF,28,TOT,35,35,34.4,4.5,10.3,.431,2.4,6.4,.378,2.0,3.9,.518,.548,1.9,2.5,.764,0.8,4.7,5.5,1.3,2.1,1.3,1.3,3.6,13.3 -Robert Covington,SF,28,PHI,13,13,33.8,3.8,9.0,.427,2.3,5.9,.390,1.5,3.1,.500,.556,1.3,1.8,.739,0.5,4.6,5.2,1.1,1.8,1.8,1.7,3.5,11.3 -Robert Covington,SF,28,MIN,22,22,34.7,4.8,11.1,.433,2.5,6.7,.372,2.3,4.4,.526,.545,2.3,3.0,.773,1.0,4.8,5.7,1.5,2.3,1.1,1.1,3.7,14.5 -Allen Crabbe,SG,26,BRK,43,20,26.3,3.2,8.7,.367,2.3,6.0,.378,0.9,2.7,.342,.499,1.0,1.3,.732,0.4,3.1,3.4,1.1,0.5,0.3,1.1,2.4,9.6 -Torrey Craig,SF,28,DEN,75,37,20.0,2.1,4.8,.442,0.8,2.5,.324,1.3,2.3,.569,.526,0.7,0.9,.700,1.2,2.3,3.5,1.0,0.5,0.6,0.6,2.3,5.7 -Jamal Crawford,SG,38,PHO,64,0,18.9,2.7,6.8,.397,1.0,3.2,.332,1.7,3.7,.453,.474,1.5,1.7,.845,0.1,1.2,1.3,3.6,0.5,0.2,1.5,1.2,7.9 -Mitch Creek,SF,26,TOT,5,0,9.6,1.6,3.2,.500,0.0,0.4,.000,1.6,2.8,.571,.500,1.0,1.4,.714,1.2,1.2,2.4,1.2,0.4,0.0,0.2,0.6,4.2 -Mitch Creek,SF,26,BRK,4,0,9.0,1.3,2.5,.500,0.0,0.3,.000,1.3,2.3,.556,.500,1.3,1.8,.714,1.3,1.3,2.5,1.3,0.3,0.0,0.0,0.8,3.8 -Mitch Creek,SF,26,MIN,1,0,12.0,3.0,6.0,.500,0.0,1.0,.000,3.0,5.0,.600,.500,0.0,0.0,0,1.0,1.0,2.0,1.0,1.0,0.0,1.0,0.0,6.0 -Jae Crowder,SF,28,UTA,80,11,27.1,4.0,10.0,.399,2.2,6.5,.331,1.8,3.4,.527,.508,1.8,2.5,.721,0.8,4.1,4.8,1.7,0.8,0.4,1.1,2.1,11.9 -Dante Cunningham,PF,31,SAS,64,21,14.5,1.2,2.5,.475,0.5,1.0,.462,0.7,1.5,.484,.570,0.2,0.3,.778,0.8,2.2,2.9,0.8,0.4,0.2,0.3,1.1,3.0 -Seth Curry,SG,28,POR,74,2,18.9,2.9,6.3,.456,1.5,3.4,.450,1.3,2.9,.463,.577,0.6,0.7,.846,0.4,1.3,1.6,0.9,0.5,0.2,0.8,1.3,7.9 -Stephen Curry,PG,30,GSW,69,69,33.8,9.2,19.4,.472,5.1,11.7,.437,4.0,7.7,.525,.604,3.8,4.2,.916,0.7,4.7,5.3,5.2,1.3,0.4,2.8,2.4,27.3 -Troy 
Daniels,SG,27,PHO,51,1,14.9,2.2,5.4,.411,1.5,3.8,.381,0.8,1.6,.481,.545,0.4,0.5,.783,0.3,1.2,1.4,0.5,0.5,0.1,0.5,1.5,6.2 -Anthony Davis,C,25,NOP,56,56,33.0,9.5,18.3,.517,0.9,2.6,.331,8.6,15.7,.547,.540,6.1,7.7,.794,3.1,8.9,12.0,3.9,1.6,2.4,2.0,2.4,25.9 -Deyonta Davis,C,22,ATL,9,0,13.1,1.7,2.4,.682,0.0,0.1,.000,1.7,2.3,.714,.682,0.7,1.1,.600,1.1,2.9,4.0,0.6,0.3,0.6,0.3,1.3,4.0 -Ed Davis,C,29,BRK,81,1,17.9,2.3,3.7,.616,0.0,0.0,.000,2.3,3.7,.620,.616,1.2,2.0,.617,2.7,5.9,8.6,0.8,0.4,0.4,0.8,2.8,5.8 -Tyler Davis,C,21,OKC,1,0,1.0,0.0,1.0,.000,0.0,0.0,0,0.0,1.0,.000,.000,0.0,0.0,0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 -Dewayne Dedmon,C,29,ATL,64,52,25.1,4.0,8.2,.492,1.3,3.4,.382,2.8,4.8,.570,.571,1.4,1.8,.814,1.6,5.9,7.5,1.4,1.1,1.1,1.3,3.3,10.8 -Sam Dekker,PF,24,TOT,47,5,16.8,2.6,5.4,.469,0.4,1.3,.306,2.1,4.1,.521,.506,0.6,1.0,.609,1.1,2.0,3.1,1.0,0.8,0.1,0.5,1.0,6.1 -Sam Dekker,PF,24,CLE,9,5,18.8,2.4,5.3,.458,0.6,1.4,.385,1.9,3.9,.486,.510,0.9,1.1,.800,1.0,2.7,3.7,1.0,1.2,0.0,0.6,1.4,6.3 -Sam Dekker,PF,24,WAS,38,0,16.3,2.6,5.5,.471,0.4,1.3,.286,2.2,4.2,.528,.505,0.5,0.9,.556,1.2,1.9,3.0,1.0,0.7,0.2,0.5,0.9,6.1 -Ángel Delgado,C,24,LAC,2,0,7.5,0.5,2.5,.200,0.0,0.0,0,0.5,2.5,.200,.200,0.5,1.0,.500,1.0,1.0,2.0,0.0,0.5,0.0,0.0,1.0,1.5 -Matthew Dellavedova,PG,28,TOT,48,0,16.9,2.0,5.0,.405,0.9,2.7,.338,1.1,2.3,.482,.496,0.9,1.1,.808,0.1,1.5,1.6,3.8,0.3,0.0,1.4,1.6,5.9 -Matthew Dellavedova,PG,28,MIL,12,0,8.1,0.5,1.6,.316,0.3,0.9,.364,0.2,0.7,.250,.421,0.3,0.3,1.000,0.0,0.8,0.8,2.4,0.2,0.0,0.9,0.8,1.7 -Matthew Dellavedova,PG,28,CLE,36,0,19.9,2.6,6.2,.413,1.1,3.3,.336,1.4,2.9,.500,.502,1.1,1.3,.792,0.2,1.7,1.9,4.2,0.3,0.1,1.6,1.8,7.3 -Luol Deng,SF,33,MIN,22,2,17.8,2.7,5.4,.500,0.6,2.0,.318,2.0,3.4,.608,.559,1.1,1.6,.714,0.9,2.4,3.3,0.8,0.7,0.4,0.6,1.1,7.1 -DeMar DeRozan,SG,29,SAS,77,77,34.9,8.2,17.1,.481,0.1,0.6,.156,8.1,16.5,.492,.483,4.8,5.7,.830,0.7,5.3,6.0,6.2,1.1,0.5,2.6,2.3,21.2 -Marcus 
Derrickson,PF,22,GSW,11,0,6.1,1.5,3.0,.485,0.9,1.8,.500,0.5,1.2,.462,.636,0.4,0.5,.800,0.7,0.5,1.2,0.1,0.0,0.1,0.3,0.8,4.2 -Cheick Diallo,PF,22,NOP,64,1,14.0,2.6,4.2,.620,0.0,0.1,.250,2.6,4.2,.625,.622,0.8,1.0,.746,1.2,4.0,5.2,0.5,0.5,0.5,0.8,1.8,6.0 -Hamidou Diallo,SG,20,OKC,51,3,10.3,1.5,3.2,.455,0.1,0.5,.167,1.4,2.8,.504,.467,0.7,1.2,.610,0.7,1.2,1.9,0.3,0.4,0.2,0.5,1.5,3.7 -Gorgui Dieng,C,29,MIN,76,2,13.6,2.5,5.0,.501,0.3,0.7,.339,2.2,4.2,.530,.527,1.2,1.4,.830,1.1,3.0,4.1,0.9,0.6,0.5,0.8,1.8,6.4 -Spencer Dinwiddie,PG,25,BRK,68,4,28.1,5.4,12.2,.442,1.8,5.4,.335,3.6,6.7,.528,.517,4.2,5.2,.806,0.4,2.1,2.4,4.6,0.6,0.3,2.2,2.8,16.8 -Donte DiVincenzo,SG,22,MIL,27,0,15.2,1.9,4.6,.403,0.8,3.1,.265,1.0,1.5,.683,.492,0.3,0.4,.750,0.6,1.8,2.4,1.1,0.5,0.2,0.7,1.4,4.9 -Luka Dončić,SG,19,DAL,72,72,32.2,7.0,16.5,.427,2.3,7.1,.327,4.7,9.3,.503,.497,4.8,6.7,.713,1.2,6.6,7.8,6.0,1.1,0.3,3.4,1.9,21.2 -Tyler Dorsey,SG,22,TOT,48,11,14.5,2.2,5.5,.405,0.9,2.8,.333,1.3,2.8,.477,.489,0.8,1.3,.623,0.6,1.8,2.4,1.2,0.3,0.0,0.6,1.1,6.2 -Tyler Dorsey,SG,22,ATL,27,0,9.3,1.2,3.3,.360,0.4,1.4,.256,0.8,1.9,.440,.416,0.6,1.0,.615,0.3,1.3,1.6,0.6,0.3,0.0,0.4,0.7,3.3 -Tyler Dorsey,SG,22,MEM,21,11,21.3,3.6,8.3,.429,1.6,4.4,.366,2.0,3.9,.500,.526,1.0,1.7,.629,0.9,2.5,3.3,1.9,0.3,0.0,0.8,1.6,9.8 -Damyean Dotson,SG,24,NYK,73,40,27.5,4.0,9.6,.415,1.7,4.7,.368,2.2,4.9,.461,.506,1.0,1.3,.745,0.5,3.1,3.6,1.8,0.8,0.1,1.0,1.8,10.7 -PJ Dozier,SG,22,BOS,6,0,8.5,1.3,3.5,.381,0.3,1.3,.250,1.0,2.2,.462,.429,0.2,0.3,.500,1.0,1.8,2.8,0.8,0.3,0.0,0.0,0.3,3.2 -Goran Dragić,PG,32,MIA,36,22,27.5,5.0,12.0,.413,1.6,4.6,.348,3.4,7.5,.454,.479,2.2,2.8,.782,0.6,2.5,3.1,4.8,0.8,0.1,2.0,2.3,13.7 -Andre Drummond,C,25,DET,79,79,33.5,7.1,13.3,.533,0.1,0.5,.132,7.0,12.8,.548,.536,3.1,5.2,.590,5.4,10.2,15.6,1.4,1.7,1.7,2.2,3.4,17.3 -Jared Dudley,PF,33,BRK,59,25,20.7,1.7,4.1,.423,0.9,2.6,.351,0.8,1.5,.545,.533,0.5,0.8,.696,0.6,2.1,2.6,1.4,0.6,0.3,0.7,2.2,4.9 -Kris 
Dunn,PG,24,CHI,46,44,30.2,4.7,11.0,.425,0.7,2.1,.354,3.9,8.9,.441,.458,1.2,1.5,.797,0.4,3.7,4.1,6.0,1.5,0.5,2.3,3.6,11.3 -Kevin Durant,SF,30,GSW,78,78,34.6,9.2,17.7,.521,1.8,5.0,.353,7.5,12.8,.587,.571,5.7,6.5,.885,0.4,5.9,6.4,5.9,0.7,1.1,2.9,2.0,26.0 -Trevon Duval,PG,20,MIL,3,0,2.0,0.7,1.0,.667,0.3,0.3,1.000,0.3,0.7,.500,.833,0.0,0.0,0,0.0,0.3,0.3,0.7,0.0,0.0,0.0,0.0,1.7 -Vince Edwards,SF,22,HOU,2,0,8.0,0.5,2.0,.250,0.5,2.0,.250,0.0,0.0,0,.375,0.0,0.0,0,0.5,0.5,1.0,0.0,0.0,0.0,0.0,0.0,1.5 -Henry Ellenson,PF,22,TOT,19,0,13.6,2.1,5.0,.411,0.9,2.0,.447,1.2,3.0,.386,.500,1.0,1.3,.760,0.3,3.2,3.5,0.8,0.4,0.1,0.5,1.4,6.0 -Henry Ellenson,PF,22,DET,2,0,12.5,2.0,5.0,.400,1.0,2.0,.500,1.0,3.0,.333,.500,1.0,1.0,1.000,0.0,4.5,4.5,0.5,0.0,0.0,0.0,1.0,6.0 -Henry Ellenson,PF,22,NYK,17,0,13.8,2.1,5.0,.412,0.9,2.0,.441,1.2,3.0,.392,.500,1.0,1.4,.739,0.3,3.1,3.4,0.9,0.4,0.1,0.5,1.5,6.0 -Wayne Ellington,SG,31,TOT,53,38,24.5,3.5,8.6,.403,2.6,7.0,.371,0.9,1.6,.541,.554,0.7,0.9,.796,0.3,1.8,2.0,1.4,1.0,0.1,0.8,1.7,10.3 -Wayne Ellington,SG,31,MIA,25,12,21.3,2.8,7.4,.375,2.3,6.2,.368,0.5,1.2,.414,.530,0.6,0.6,.875,0.2,1.6,1.9,1.2,1.0,0.1,0.6,1.6,8.4 -Wayne Ellington,SG,31,DET,28,26,27.3,4.1,9.8,.421,2.9,7.8,.373,1.2,2.0,.607,.570,0.9,1.2,.758,0.3,1.9,2.1,1.5,1.1,0.1,0.9,1.9,12.0 -Joel Embiid,C,24,PHI,64,64,33.7,9.1,18.7,.484,1.2,4.1,.300,7.8,14.6,.535,.517,8.2,10.1,.804,2.5,11.1,13.6,3.7,0.7,1.9,3.5,3.3,27.5 -James Ennis III,SF,28,TOT,58,27,21.2,2.4,5.1,.469,0.9,2.7,.353,1.4,2.4,.601,.563,1.0,1.4,.716,1.0,2.1,3.1,0.7,0.7,0.4,0.6,2.6,6.7 -James Ennis III,SF,28,HOU,40,25,23.7,2.6,5.3,.493,1.1,3.0,.367,1.5,2.3,.659,.597,1.1,1.5,.724,0.9,2.0,2.9,0.7,1.0,0.4,0.6,2.8,7.4 -James Ennis III,SF,28,PHI,18,2,15.6,1.9,4.6,.410,0.6,2.0,.306,1.3,2.6,.489,.476,0.9,1.3,.696,1.3,2.3,3.6,0.8,0.2,0.4,0.6,2.2,5.3 -Drew Eubanks,PF,21,SAS,23,0,4.9,0.7,1.1,.577,0.0,0.0,0,0.7,1.1,.577,.577,0.5,0.6,.846,0.3,1.2,1.5,0.3,0.1,0.2,0.3,0.5,1.8 -Jacob 
Evans,SG,21,GSW,30,1,6.8,0.6,1.8,.340,0.1,0.5,.267,0.5,1.3,.368,.377,0.0,0.0,.000,0.2,0.6,0.8,0.8,0.2,0.1,0.4,0.9,1.3 -Jawun Evans,PG,22,TOT,8,0,8.1,0.4,1.8,.214,0.0,0.4,.000,0.4,1.4,.273,.214,0.0,0.0,0,0.1,1.4,1.5,1.3,0.4,0.0,0.8,1.0,0.8 -Jawun Evans,PG,22,PHO,7,0,9.1,0.4,1.9,.231,0.0,0.3,.000,0.4,1.6,.273,.231,0.0,0.0,0,0.1,1.6,1.7,1.4,0.4,0.0,0.9,1.1,0.9 -Jawun Evans,PG,22,OKC,1,0,1.0,0.0,1.0,.000,0.0,1.0,.000,0.0,0.0,0,.000,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -Tyreke Evans,SG,29,IND,69,18,20.3,3.7,9.6,.389,1.1,3.1,.356,2.6,6.4,.405,.448,1.7,2.3,.719,0.5,2.4,2.9,2.4,0.8,0.3,1.7,1.7,10.2 -Dante Exum,PG,23,UTA,42,1,15.8,2.4,5.7,.419,0.4,1.5,.290,2.0,4.3,.464,.456,1.6,2.0,.791,0.4,1.2,1.6,2.6,0.3,0.1,1.2,1.6,6.9 -Kenneth Faried,C,29,TOT,37,13,19.7,4.2,7.2,.589,0.2,0.7,.320,4.0,6.5,.617,.604,1.7,2.7,.646,2.6,4.1,6.8,0.5,0.5,0.6,0.9,2.2,10.4 -Kenneth Faried,C,29,BRK,12,0,9.8,2.1,3.5,.595,0.1,0.4,.200,2.0,3.1,.649,.607,0.8,1.3,.625,1.3,2.4,3.7,0.2,0.2,0.3,0.5,0.7,5.1 -Kenneth Faried,C,29,HOU,25,13,24.4,5.2,8.9,.587,0.3,0.8,.350,5.0,8.1,.611,.603,2.2,3.3,.651,3.3,5.0,8.2,0.7,0.6,0.8,1.1,3.0,12.9 -Derrick Favors,C,27,UTA,76,70,23.2,4.8,8.1,.586,0.2,1.0,.218,4.6,7.1,.640,.600,2.0,3.0,.675,2.7,4.6,7.4,1.2,0.7,1.4,1.1,2.1,11.8 -Cristiano Felício,C,26,CHI,60,0,12.4,1.6,3.0,.531,0.0,0.1,.000,1.6,2.9,.543,.531,0.8,1.2,.685,1.3,2.3,3.6,0.6,0.2,0.1,0.6,1.2,4.0 -Raymond Felton,PG,34,OKC,33,0,11.5,1.7,4.1,.407,0.6,1.8,.328,1.1,2.2,.473,.481,0.4,0.4,.923,0.1,0.9,1.0,1.6,0.3,0.2,0.4,0.9,4.3 -Terrance Ferguson,SG,20,OKC,74,74,26.1,2.5,5.8,.429,1.4,3.9,.366,1.1,1.9,.560,.552,0.5,0.7,.725,0.4,1.5,1.9,1.0,0.5,0.2,0.6,3.1,6.9 -Yogi Ferrell,PG,25,SAC,71,3,15.0,2.2,5.0,.435,0.8,2.1,.362,1.4,2.9,.488,.511,0.8,0.9,.896,0.2,1.4,1.5,1.9,0.5,0.1,0.6,0.9,5.9 -Dorian Finney-Smith,SF,25,DAL,81,26,24.5,2.8,6.5,.432,1.0,3.1,.311,1.8,3.4,.544,.507,0.9,1.3,.709,1.7,3.1,4.8,1.2,0.9,0.4,0.9,2.3,7.5 -Bryn 
Forbes,SG,25,SAS,82,81,28.0,4.4,9.6,.456,2.1,5.0,.426,2.3,4.6,.489,.568,0.8,1.0,.885,0.2,2.7,2.9,2.1,0.5,0.0,1.0,1.9,11.8 -Evan Fournier,SG,26,ORL,81,81,31.5,5.8,13.2,.438,1.9,5.6,.340,3.9,7.6,.509,.509,1.7,2.1,.806,0.5,2.7,3.2,3.6,0.9,0.1,1.9,2.8,15.1 -De'Aaron Fox,PG,21,SAC,81,81,31.4,6.2,13.6,.458,1.1,2.9,.371,5.2,10.7,.482,.497,3.7,5.1,.727,0.5,3.2,3.8,7.3,1.6,0.6,2.8,2.5,17.3 -Melvin Frazier,SG,22,ORL,10,0,4.4,0.7,2.1,.333,0.0,0.6,.000,0.7,1.5,.467,.333,0.1,0.4,.250,0.3,0.2,0.5,0.1,0.1,0.0,0.1,0.6,1.5 -Tim Frazier,PG,28,TOT,59,19,19.0,2.0,4.4,.444,0.6,1.7,.366,1.3,2.7,.494,.515,0.7,0.9,.759,0.7,2.2,2.8,4.2,0.5,0.1,1.3,1.9,5.3 -Tim Frazier,PG,28,NOP,47,17,19.3,1.9,4.1,.451,0.6,1.6,.351,1.3,2.5,.517,.521,0.7,0.9,.780,0.7,2.2,2.9,4.4,0.5,0.1,1.3,2.0,5.0 -Tim Frazier,PG,28,MIL,12,2,17.6,2.3,5.5,.424,0.8,2.0,.417,1.5,3.5,.429,.500,0.8,1.1,.692,0.8,1.8,2.6,3.5,0.4,0.1,1.3,1.6,6.3 -Jimmer Fredette,SG,29,PHO,6,0,10.8,1.3,4.8,.276,0.0,2.2,.000,1.3,2.7,.500,.276,1.0,1.0,1.000,0.2,1.0,1.2,1.3,0.5,0.0,0.8,0.8,3.7 -Enes Freedom,C,26,TOT,67,31,24.5,5.6,10.2,.549,0.1,0.5,.294,5.4,9.7,.562,.556,2.3,2.9,.787,3.8,6.0,9.8,1.7,0.5,0.4,1.8,2.5,13.7 -Enes Freedom,C,26,NYK,44,23,25.6,5.7,10.6,.536,0.2,0.5,.318,5.5,10.1,.547,.544,2.4,2.9,.814,3.9,6.6,10.5,1.9,0.4,0.4,1.8,2.4,14.0 -Enes Freedom,C,26,POR,23,8,22.3,5.4,9.3,.577,0.1,0.5,.250,5.3,8.8,.596,.584,2.2,3.0,.735,3.7,4.9,8.6,1.4,0.6,0.4,1.7,2.7,13.1 -Channing Frye,C,35,CLE,36,6,9.5,1.2,3.3,.368,0.9,2.2,.405,0.3,1.1,.289,.504,0.3,0.4,.786,0.1,1.3,1.4,0.6,0.2,0.1,0.4,1.2,3.6 -Markelle Fultz,SG,20,PHI,19,15,22.5,3.4,8.2,.419,0.2,0.7,.286,3.2,7.4,.433,.432,1.1,1.9,.568,1.4,2.3,3.7,3.1,0.9,0.3,1.3,2.7,8.2 -Danilo Gallinari,SF,30,LAC,68,68,30.3,6.0,13.0,.463,2.4,5.5,.433,3.6,7.5,.484,.554,5.4,6.0,.904,0.8,5.3,6.1,2.6,0.7,0.3,1.5,1.9,19.8 -Langston Galloway,SG,27,DET,80,4,21.8,2.9,7.3,.388,1.7,4.8,.355,1.2,2.6,.449,.503,1.0,1.2,.844,0.6,1.5,2.1,1.1,0.5,0.1,0.3,1.7,8.4 -Billy 
Garrett,SG,24,NYK,4,0,15.8,2.8,6.8,.407,0.0,2.0,.000,2.8,4.8,.579,.407,1.0,1.0,1.000,0.0,0.8,0.8,1.8,0.3,0.3,0.5,0.8,6.5 -Marc Gasol,C,34,TOT,79,72,30.8,4.9,11.0,.448,1.3,3.5,.363,3.7,7.6,.487,.505,2.4,3.2,.759,1.0,6.9,7.9,4.4,1.1,1.1,2.0,2.7,13.6 -Marc Gasol,C,34,MEM,53,53,33.7,5.7,12.9,.444,1.4,4.2,.344,4.3,8.7,.491,.499,2.9,3.8,.756,1.1,7.5,8.6,4.7,1.1,1.2,2.2,2.8,15.7 -Marc Gasol,C,34,TOR,26,19,24.9,3.3,7.2,.465,0.9,2.0,.442,2.5,5.2,.474,.527,1.5,2.0,.769,0.9,5.7,6.6,3.9,0.9,0.9,1.4,2.7,9.1 -Pau Gasol*,C,38,TOT,30,6,12.0,1.4,3.1,.447,0.2,0.4,.462,1.2,2.7,.444,.479,0.9,1.3,.700,0.7,3.8,4.6,1.7,0.2,0.5,0.5,1.0,3.9 -Pau Gasol*,C,38,SAS,27,6,12.2,1.5,3.3,.466,0.2,0.4,.500,1.3,2.9,.462,.494,1.0,1.4,.711,0.8,3.9,4.7,1.9,0.2,0.5,0.5,1.0,4.2 -Pau Gasol*,C,38,MIL,3,0,10.0,0.3,2.0,.167,0.3,1.0,.333,0.0,1.0,.000,.250,0.3,0.7,.500,0.0,3.3,3.3,0.7,0.0,0.3,0.7,0.3,1.3 -Rudy Gay,PF,32,SAS,69,51,26.7,5.4,10.8,.504,1.1,2.7,.402,4.4,8.1,.537,.554,1.7,2.1,.816,0.9,5.9,6.8,2.6,0.8,0.5,1.7,2.3,13.7 -Paul George,SF,28,OKC,77,77,36.9,9.2,21.0,.438,3.8,9.8,.386,5.4,11.1,.484,.529,5.9,7.0,.839,1.4,6.8,8.2,4.1,2.2,0.4,2.7,2.8,28.0 -Taj Gibson,PF,33,MIN,70,57,24.1,4.3,7.7,.566,0.2,0.5,.324,4.2,7.2,.583,.576,1.9,2.5,.757,2.5,4.1,6.5,1.2,0.8,0.6,1.0,2.7,10.8 -Harry Giles,PF,20,SAC,58,0,14.1,3.0,6.0,.503,0.0,0.1,.000,3.0,5.9,.512,.503,1.0,1.6,.637,1.1,2.7,3.8,1.5,0.5,0.4,1.3,2.6,7.0 -Shai Gilgeous-Alexander,PG,20,LAC,82,73,26.5,4.2,8.7,.476,0.6,1.7,.367,3.5,7.0,.503,.512,1.9,2.4,.800,0.7,2.1,2.8,3.3,1.2,0.5,1.7,2.1,10.8 -Rudy Gobert,C,26,UTA,81,80,31.8,5.9,8.8,.669,0.0,0.0,0,5.9,8.8,.669,.669,4.1,6.4,.636,3.8,9.0,12.9,2.0,0.8,2.3,1.6,2.9,15.9 -Brandon Goodwin,PG,23,DEN,16,0,3.6,0.4,1.4,.261,0.1,0.4,.333,0.3,1.1,.235,.304,0.6,0.7,.818,0.1,0.1,0.2,0.9,0.0,0.0,0.2,0.4,1.4 -Aaron Gordon,PF,23,ORL,78,78,33.8,6.0,13.4,.449,1.6,4.4,.349,4.5,9.0,.499,.507,2.4,3.2,.731,1.7,5.7,7.4,3.7,0.7,0.7,2.1,2.2,16.0 -Eric 
Gordon,SG,30,HOU,68,53,31.7,5.6,13.8,.409,3.2,8.8,.360,2.5,5.0,.497,.525,1.8,2.2,.783,0.3,1.9,2.2,1.9,0.6,0.4,1.3,2.1,16.2 -Marcin Gortat,C,34,LAC,47,43,16.0,2.1,4.0,.532,0.0,0.0,0,2.1,4.0,.532,.532,0.7,1.0,.729,1.4,4.1,5.6,1.4,0.1,0.5,1.1,2.0,5.0 -Devonte' Graham,PG,23,CHO,46,3,14.7,1.6,4.7,.343,0.7,2.6,.281,0.9,2.1,.421,.421,0.8,1.0,.761,0.2,1.2,1.4,2.6,0.5,0.0,0.7,1.0,4.7 -Treveon Graham,SG,25,BRK,35,21,20.4,1.8,5.5,.335,1.1,3.7,.297,0.7,1.8,.413,.435,0.5,0.6,.818,0.7,2.4,3.1,1.0,0.4,0.2,0.5,1.9,5.3 -Jerami Grant,PF,24,OKC,80,77,32.7,5.1,10.3,.497,1.4,3.7,.392,3.7,6.6,.555,.567,2.0,2.8,.710,1.2,4.0,5.2,1.0,0.8,1.3,0.8,2.7,13.6 -Jerian Grant,PG,26,ORL,60,1,15.7,1.5,3.7,.418,0.7,1.8,.364,0.9,1.8,.473,.509,0.4,0.7,.650,0.3,1.3,1.6,2.6,0.7,0.1,0.9,1.3,4.2 -Donte Grantham,SF,23,OKC,3,0,0.7,0.0,0.7,.000,0.0,0.7,.000,0.0,0.0,0,.000,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -Danny Green,SG,31,TOR,80,80,27.7,3.7,7.9,.465,2.5,5.4,.455,1.2,2.4,.487,.622,0.5,0.6,.841,0.8,3.2,4.0,1.6,0.9,0.7,0.9,2.1,10.3 -Draymond Green,PF,28,GSW,66,66,31.3,2.8,6.4,.445,0.7,2.5,.285,2.1,3.9,.549,.501,1.0,1.4,.692,0.9,6.4,7.3,6.9,1.4,1.1,2.6,3.0,7.4 -Gerald Green,SG,33,HOU,73,0,20.2,3.2,7.9,.400,2.1,6.0,.354,1.0,1.9,.547,.535,0.8,0.9,.838,0.4,2.1,2.5,0.5,0.5,0.4,0.8,1.7,9.2 -JaMychal Green,PF,28,TOT,65,6,21.1,3.5,7.3,.483,1.1,2.7,.403,2.4,4.6,.530,.558,1.2,1.6,.792,1.6,4.7,6.3,0.8,0.7,0.5,1.3,3.0,9.4 -JaMychal Green,PF,28,MEM,41,4,22.0,3.7,7.6,.484,0.9,2.3,.396,2.8,5.3,.523,.545,1.5,2.0,.788,1.8,4.4,6.1,0.9,0.8,0.6,1.6,3.0,9.8 -JaMychal Green,PF,28,LAC,24,2,19.6,3.3,6.8,.482,1.4,3.3,.413,1.9,3.5,.548,.582,0.7,0.9,.810,1.3,5.2,6.5,0.6,0.5,0.3,1.0,2.9,8.7 -Jeff Green,PF,32,WAS,77,44,27.2,4.2,8.9,.475,1.4,4.2,.347,2.8,4.8,.586,.555,2.4,2.7,.888,0.7,3.3,4.0,1.8,0.6,0.5,1.3,2.1,12.3 -Blake Griffin,PF,29,DET,75,75,35.0,8.3,17.9,.462,2.5,7.0,.362,5.7,10.9,.525,.532,5.5,7.3,.753,1.3,6.2,7.5,5.4,0.7,0.4,3.4,2.7,24.5 -Daniel 
Hamilton,SG,23,ATL,19,3,10.7,1.2,3.2,.383,0.4,1.2,.348,0.8,1.9,.405,.450,0.2,0.3,.500,0.5,1.9,2.5,1.2,0.3,0.1,0.8,1.0,3.0 -Dusty Hannahs,SG,25,MEM,2,0,13.0,1.5,6.0,.250,0.0,2.5,.000,1.5,3.5,.429,.250,1.0,1.0,1.000,0.0,0.5,0.5,2.5,0.5,0.0,0.5,0.5,4.0 -Tim Hardaway Jr.,SG,26,TOT,65,63,31.6,6.0,15.3,.393,2.5,7.3,.340,3.5,7.9,.442,.474,3.6,4.2,.841,0.5,2.9,3.4,2.4,0.8,0.1,1.6,2.2,18.1 -Tim Hardaway Jr.,SG,26,NYK,46,46,32.6,6.1,15.8,.388,2.5,7.3,.347,3.6,8.5,.424,.469,4.3,5.1,.854,0.6,2.9,3.5,2.7,0.9,0.1,1.8,2.3,19.1 -Tim Hardaway Jr.,SG,26,DAL,19,17,29.4,5.7,14.1,.404,2.4,7.4,.321,3.3,6.7,.496,.489,1.7,2.3,.767,0.4,2.8,3.2,1.9,0.6,0.1,1.3,1.8,15.5 -James Harden,PG,29,HOU,78,78,36.8,10.8,24.5,.442,4.8,13.2,.368,6.0,11.3,.528,.541,9.7,11.0,.879,0.8,5.8,6.6,7.5,2.0,0.7,5.0,3.1,36.1 -Maurice Harkless,SF,25,POR,60,53,23.6,3.2,6.5,.487,0.6,2.0,.275,2.6,4.5,.582,.530,0.8,1.2,.671,1.3,3.2,4.5,1.2,1.1,0.9,0.8,2.7,7.7 -Montrezl Harrell,C,25,LAC,82,5,26.3,6.7,10.8,.615,0.0,0.2,.176,6.6,10.6,.623,.617,3.2,5.0,.643,2.2,4.3,6.5,2.0,0.9,1.3,1.6,3.1,16.6 -Devin Harris,PG,35,DAL,68,2,15.8,1.9,5.1,.380,0.9,2.9,.310,1.0,2.2,.476,.470,1.5,2.0,.761,0.2,1.5,1.6,1.8,0.5,0.2,0.8,2.0,6.3 -Gary Harris,SG,24,DEN,57,48,28.8,4.7,11.2,.424,1.4,4.2,.339,3.3,6.9,.476,.488,2.0,2.5,.799,0.7,2.1,2.8,2.2,1.0,0.3,1.2,2.0,12.9 -Joe Harris,SG,27,BRK,76,76,30.2,4.9,9.8,.500,2.4,5.1,.474,2.5,4.8,.528,.622,1.4,1.8,.827,0.7,3.1,3.8,2.4,0.5,0.2,1.6,2.4,13.7 -Tobias Harris,PF,26,TOT,82,82,34.7,7.5,15.3,.487,1.9,4.8,.397,5.5,10.5,.528,.549,3.2,3.7,.866,0.8,7.0,7.9,2.8,0.6,0.5,1.8,2.2,20.0 -Tobias Harris,PF,26,LAC,55,55,34.6,7.7,15.5,.496,2.0,4.7,.434,5.7,10.9,.523,.561,3.5,4.0,.877,0.7,7.2,7.9,2.7,0.7,0.4,2.0,2.2,20.9 -Tobias Harris,PF,26,PHI,27,27,35.0,6.9,14.8,.469,1.6,5.0,.326,5.3,9.8,.542,.524,2.7,3.3,.841,1.2,6.7,7.9,2.9,0.4,0.5,1.6,2.3,18.2 -Andrew Harrison,PG,24,TOT,17,0,11.0,0.9,3.1,.302,0.3,1.5,.200,0.6,1.6,.393,.349,1.0,1.1,.944,0.4,0.8,1.2,1.4,0.2,0.1,0.7,2.0,3.2 -Andrew 
Harrison,PG,24,MEM,1,0,5.0,1.0,2.0,.500,0.0,1.0,.000,1.0,1.0,1.000,.500,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0 -Andrew Harrison,PG,24,CLE,10,0,14.4,1.2,3.9,.308,0.3,1.4,.214,0.9,2.5,.360,.346,1.6,1.6,1.000,0.4,1.1,1.5,1.7,0.4,0.2,1.0,2.6,4.3 -Andrew Harrison,PG,24,NOP,6,0,6.3,0.5,2.0,.250,0.3,1.7,.200,0.2,0.3,.500,.333,0.2,0.3,.500,0.3,0.5,0.8,1.2,0.0,0.0,0.3,1.2,1.5 -Shaquille Harrison,SG,25,CHI,73,11,19.6,2.5,5.8,.432,0.3,1.2,.270,2.2,4.6,.475,.460,1.1,1.7,.667,0.5,2.6,3.0,1.9,1.2,0.4,0.8,1.7,6.5 -Josh Hart,SG,23,LAL,67,22,25.6,2.8,6.9,.407,1.4,4.1,.336,1.4,2.8,.511,.506,0.8,1.2,.688,0.5,3.2,3.7,1.4,1.0,0.6,0.9,2.2,7.8 -Isaiah Hartenstein,PF,20,HOU,28,0,7.9,0.7,1.5,.488,0.1,0.2,.333,0.6,1.3,.514,.512,0.4,0.5,.786,0.8,0.9,1.7,0.5,0.3,0.4,0.5,2.0,1.9 -Udonis Haslem,C,38,MIA,10,1,7.4,1.1,3.3,.333,0.0,1.2,.000,1.1,2.1,.524,.333,0.3,0.4,.750,0.3,2.4,2.7,0.2,0.0,0.0,0.3,0.9,2.5 -Gordon Hayward,PF,28,BOS,72,18,25.9,4.1,8.8,.466,1.1,3.2,.333,3.0,5.6,.542,.527,2.2,2.6,.834,0.7,3.8,4.5,3.4,0.9,0.3,1.5,1.4,11.5 -John Henson,C,28,MIL,14,0,13.4,2.2,4.8,.463,0.8,2.2,.355,1.4,2.6,.556,.545,0.4,0.7,.600,1.1,3.9,5.1,1.0,0.5,0.8,0.9,1.1,5.6 -Juancho Hernangómez,PF,23,DEN,70,25,19.4,2.0,4.5,.439,0.9,2.6,.365,1.0,1.9,.541,.545,0.9,1.2,.767,0.9,2.9,3.8,0.8,0.4,0.3,0.5,1.3,5.8 -Willy Hernangómez,C,24,CHO,58,3,14.0,2.6,5.1,.519,0.3,0.7,.385,2.4,4.4,.539,.544,1.7,2.5,.694,2.0,3.3,5.4,1.0,0.3,0.3,1.0,1.7,7.3 -Mario Hezonja,SF,23,NYK,58,24,20.8,3.3,8.0,.412,0.7,2.6,.276,2.6,5.4,.478,.457,1.5,2.0,.763,0.5,3.6,4.1,1.5,1.0,0.1,1.5,1.9,8.8 -Isaiah Hicks,PF,24,NYK,3,0,10.7,1.3,2.7,.500,0.0,0.0,0,1.3,2.7,.500,.500,1.3,1.7,.800,0.7,1.7,2.3,0.7,0.3,1.0,0.3,1.7,4.0 -Buddy Hield,SG,26,SAC,82,82,31.9,7.6,16.6,.458,3.4,7.9,.427,4.2,8.6,.487,.560,2.1,2.4,.886,1.3,3.7,5.0,2.5,0.7,0.4,1.8,2.5,20.7 -Haywood Highsmith,SF,22,PHI,5,0,8.0,0.8,2.0,.400,0.2,1.0,.200,0.6,1.0,.600,.450,0.0,0.4,.000,0.0,1.0,1.0,0.4,0.2,0.0,0.2,0.2,1.8 
-Nenê,C,36,HOU,42,2,13.0,1.4,2.8,.517,0.0,0.1,.000,1.4,2.7,.531,.517,0.7,1.1,.660,0.8,2.1,2.9,0.6,0.4,0.4,0.3,2.1,3.6 -George Hill,PG,32,TOT,60,13,21.7,2.8,6.3,.452,0.8,2.6,.314,2.0,3.7,.547,.516,1.2,1.4,.824,0.7,1.8,2.5,2.3,0.9,0.1,0.9,1.7,7.6 -George Hill,PG,32,CLE,13,13,26.5,4.2,8.2,.514,1.0,2.2,.464,3.2,6.1,.532,.575,1.3,1.5,.850,0.9,1.2,2.1,2.8,0.9,0.1,1.5,2.7,10.8 -George Hill,PG,32,MIL,47,0,20.4,2.4,5.7,.428,0.7,2.7,.280,1.7,3.1,.556,.493,1.1,1.4,.815,0.6,2.0,2.6,2.1,0.9,0.1,0.7,1.4,6.8 -Solomon Hill,SF,27,NOP,44,15,20.0,1.5,4.0,.382,0.7,2.3,.317,0.8,1.8,.468,.472,0.5,0.7,.719,0.8,2.3,3.0,1.3,0.5,0.2,0.7,1.8,4.3 -Aaron Holiday,PG,22,IND,50,0,12.9,2.1,5.2,.401,0.9,2.5,.339,1.2,2.7,.459,.483,0.8,1.0,.820,0.1,1.2,1.3,1.7,0.4,0.3,0.8,1.4,5.9 -Jrue Holiday,SG,28,NOP,67,67,35.9,8.2,17.3,.472,1.8,5.4,.325,6.4,11.9,.539,.523,3.1,4.0,.768,1.1,3.9,5.0,7.7,1.6,0.8,3.1,2.2,21.2 -Justin Holiday,SG,29,TOT,82,77,31.8,3.7,9.5,.386,2.0,5.7,.348,1.7,3.8,.442,.490,1.2,1.3,.896,0.6,3.4,3.9,1.8,1.5,0.4,1.3,2.0,10.5 -Justin Holiday,SG,29,CHI,38,38,34.9,4.0,10.4,.383,2.6,7.1,.359,1.4,3.3,.435,.506,1.1,1.2,.891,0.5,3.9,4.4,2.2,1.8,0.6,1.2,2.1,11.6 -Justin Holiday,SG,29,MEM,44,39,29.1,3.4,8.7,.389,1.5,4.4,.333,1.9,4.3,.447,.474,1.2,1.4,.900,0.6,2.9,3.5,1.4,1.2,0.3,1.3,1.9,9.5 -John Holland,SF,30,CLE,1,0,1.0,0.0,0.0,0,0.0,0.0,0,0.0,0.0,0,0,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -Rondae Hollis-Jefferson,SF,24,BRK,59,21,20.9,3.4,8.3,.411,0.2,0.8,.184,3.2,7.4,.436,.420,2.0,3.1,.645,1.4,3.8,5.3,1.6,0.7,0.5,1.2,1.8,8.9 -Richaun Holmes,C,25,PHO,70,4,16.9,3.2,5.2,.608,0.0,0.0,0,3.2,5.2,.608,.608,1.8,2.5,.731,1.6,3.1,4.7,0.9,0.6,1.1,0.7,2.8,8.2 -Rodney Hood,SG,26,TOT,72,49,26.3,4.1,9.3,.435,1.2,3.3,.356,2.9,6.0,.478,.498,1.9,2.2,.884,0.3,1.8,2.2,1.8,0.8,0.2,0.8,2.0,11.2 -Rodney Hood,SG,26,CLE,45,45,27.4,4.3,10.1,.427,1.2,3.4,.362,3.1,6.7,.460,.488,2.3,2.5,.912,0.4,2.1,2.5,2.0,0.8,0.1,0.8,2.2,12.2 -Rodney 
Hood,SG,26,POR,27,4,24.4,3.6,8.0,.452,1.1,3.1,.345,2.6,4.9,.519,.518,1.2,1.5,.805,0.3,1.4,1.7,1.3,0.8,0.3,0.7,1.7,9.6 -Al Horford,C,32,BOS,68,68,29.0,5.7,10.6,.535,1.1,3.0,.360,4.6,7.6,.604,.586,1.1,1.4,.821,1.8,5.0,6.7,4.2,0.9,1.3,1.5,1.9,13.6 -Danuel House Jr.,SF,25,HOU,39,13,25.1,3.0,6.5,.468,1.9,4.6,.416,1.1,1.9,.595,.615,1.4,1.8,.789,0.6,2.9,3.6,1.0,0.5,0.3,0.9,2.1,9.4 -Dwight Howard,C,33,WAS,9,9,25.6,4.8,7.7,.623,0.0,0.0,0,4.8,7.7,.623,.623,3.2,5.3,.604,2.7,6.6,9.2,0.4,0.8,0.4,1.8,3.8,12.8 -Kevin Huerter,SG,20,ATL,75,59,27.3,3.7,8.8,.419,1.8,4.7,.385,1.9,4.1,.457,.522,0.5,0.7,.732,0.8,2.5,3.3,2.9,0.9,0.3,1.5,2.1,9.7 -Isaac Humphries,C,21,ATL,5,1,11.2,1.2,4.2,.286,0.6,2.2,.273,0.6,2.0,.300,.357,0.0,0.0,0,0.8,1.4,2.2,0.0,0.2,0.0,0.0,1.4,3.0 -R.J. Hunter,SG,25,BOS,1,0,26.0,6.0,13.0,.462,4.0,10.0,.400,2.0,3.0,.667,.615,1.0,2.0,.500,1.0,2.0,3.0,3.0,1.0,0.0,0.0,3.0,17.0 -Chandler Hutchison,SF,22,CHI,44,14,20.3,2.2,4.8,.459,0.3,1.1,.280,1.9,3.6,.516,.493,0.5,0.9,.605,0.7,3.5,4.2,0.8,0.5,0.1,0.6,1.3,5.2 -Serge Ibaka,C,29,TOR,74,51,27.2,6.3,11.9,.529,0.7,2.3,.290,5.6,9.6,.586,.557,1.8,2.4,.763,2.1,6.0,8.1,1.3,0.4,1.4,1.5,2.9,15.0 -Andre Iguodala,SF,35,GSW,68,13,23.2,2.2,4.4,.500,0.7,2.1,.333,1.5,2.3,.652,.579,0.6,1.0,.582,0.7,3.0,3.7,3.2,0.9,0.8,0.8,1.4,5.7 -Ersan İlyasova,PF,31,MIL,67,7,18.4,2.5,5.7,.438,0.9,2.4,.363,1.6,3.3,.493,.514,0.9,1.1,.824,1.4,3.1,4.5,0.8,0.5,0.3,0.7,2.6,6.8 -Joe Ingles,PF,31,UTA,82,82,31.3,4.4,9.8,.448,2.3,5.9,.391,2.1,3.9,.533,.565,1.1,1.5,.707,0.4,3.6,4.0,5.7,1.2,0.2,2.4,2.2,12.1 -Andre Ingram,SG,33,LAL,4,0,3.8,0.0,1.5,.000,0.0,0.8,.000,0.0,0.8,.000,.000,0.0,0.0,0,0.3,0.3,0.5,0.0,0.3,0.0,0.3,0.0,0.0 -Brandon Ingram,SF,21,LAL,52,52,33.8,7.0,14.0,.497,0.6,1.8,.330,6.4,12.2,.521,.518,3.8,5.6,.675,0.8,4.3,5.1,3.0,0.5,0.6,2.5,2.9,18.3 -Kyrie Irving,PG,26,BOS,67,67,33.0,9.0,18.5,.487,2.6,6.5,.401,6.4,12.0,.533,.557,3.2,3.7,.873,1.1,3.9,5.0,6.9,1.5,0.5,2.6,2.5,23.8 -Jonathan 
Isaac,PF,21,ORL,75,64,26.6,3.5,8.1,.429,1.1,3.5,.323,2.3,4.6,.510,.499,1.5,1.8,.815,1.3,4.2,5.5,1.1,0.8,1.3,1.0,1.9,9.6 -Wes Iwundu,SF,24,ORL,68,13,18.1,1.7,4.0,.412,0.4,1.2,.367,1.2,2.9,.431,.465,1.2,1.5,.816,0.5,2.2,2.7,1.1,0.4,0.3,0.6,1.8,5.0 -Demetrius Jackson,PG,24,PHI,6,0,6.5,1.3,2.5,.533,0.3,1.0,.333,1.0,1.5,.667,.600,0.7,0.7,1.000,0.3,0.2,0.5,0.8,0.3,0.0,0.2,0.7,3.7 -Frank Jackson,PG,20,NOP,61,16,19.2,3.2,7.3,.434,0.9,2.8,.314,2.3,4.6,.507,.493,0.9,1.2,.740,0.4,1.8,2.2,1.1,0.4,0.0,0.8,1.5,8.1 -Jaren Jackson Jr.,PF,19,MEM,58,56,26.1,5.1,10.2,.506,0.9,2.4,.359,4.3,7.7,.553,.549,2.6,3.4,.766,1.3,3.4,4.7,1.1,0.9,1.4,1.7,3.8,13.8 -Josh Jackson,SG,21,PHO,79,29,25.2,4.4,10.6,.413,0.9,2.8,.324,3.5,7.8,.445,.456,1.8,2.7,.671,0.8,3.6,4.4,2.3,0.9,0.7,2.2,2.6,11.5 -Justin Jackson,SF,23,TOT,81,14,19.9,2.7,6.0,.447,1.1,3.0,.355,1.6,3.0,.542,.537,0.8,1.0,.785,0.5,2.1,2.6,1.2,0.4,0.2,0.4,1.2,7.2 -Justin Jackson,SF,23,SAC,52,3,20.8,2.4,5.7,.424,1.1,3.1,.346,1.3,2.6,.515,.517,0.8,1.0,.820,0.5,2.3,2.8,1.3,0.4,0.3,0.4,1.3,6.7 -Justin Jackson,SF,23,DAL,29,11,18.3,3.2,6.6,.484,1.1,3.0,.372,2.1,3.6,.577,.568,0.7,1.0,.724,0.7,1.6,2.3,1.0,0.3,0.0,0.2,1.1,8.2 -Reggie Jackson,PG,28,DET,82,82,27.9,5.4,12.8,.421,2.1,5.7,.369,3.3,7.0,.464,.504,2.5,2.9,.864,0.5,2.1,2.6,4.2,0.7,0.1,1.8,2.5,15.4 -LeBron James,SF,34,LAL,55,55,35.2,10.1,19.9,.510,2.0,5.9,.339,8.1,14.0,.582,.560,5.1,7.6,.665,1.0,7.4,8.5,8.3,1.3,0.6,3.6,1.7,27.4 -Amile Jefferson,PF,25,ORL,12,0,5.7,0.8,1.3,.625,0.0,0.0,0,0.8,1.3,.625,.625,0.6,0.7,.875,0.5,1.3,1.8,0.3,0.3,0.3,0.1,0.7,2.3 -John Jenkins,SG,27,TOT,26,0,12.8,1.6,4.0,.400,0.8,2.2,.379,0.8,1.8,.426,.505,0.6,0.7,.833,0.2,1.2,1.4,0.8,0.0,0.1,0.3,0.4,4.7 -John Jenkins,SG,27,WAS,4,0,3.5,0.5,0.5,1.000,0.5,0.5,1.000,0.0,0.0,0,1.500,0.0,0.0,0,0.0,0.3,0.3,0.3,0.0,0.0,0.0,0.0,1.5 -John Jenkins,SG,27,NYK,22,0,14.5,1.8,4.7,.388,0.9,2.5,.357,0.9,2.1,.426,.485,0.7,0.8,.833,0.3,1.4,1.6,1.0,0.0,0.1,0.4,0.5,5.2 -Jonas 
Jerebko,PF,31,GSW,73,6,16.7,2.2,4.9,.459,0.9,2.6,.367,1.3,2.3,.563,.556,0.9,1.1,.800,1.0,3.0,3.9,1.3,0.4,0.2,0.6,1.9,6.3 -Alize Johnson,PF,22,IND,14,0,4.6,0.3,1.1,.250,0.1,0.1,.500,0.2,1.0,.214,.281,0.3,0.6,.500,0.3,1.1,1.4,0.1,0.1,0.2,0.0,0.5,0.9 -Amir Johnson,C,31,PHI,51,6,10.4,1.5,3.1,.503,0.2,0.8,.300,1.3,2.3,.573,.541,0.6,0.8,.756,0.9,2.0,2.9,1.2,0.3,0.3,0.9,1.9,3.9 -B.J. Johnson,SF,23,TOT,7,0,7.0,1.3,2.6,.500,0.4,1.0,.429,0.9,1.6,.545,.583,0.3,0.3,1.000,0.3,0.9,1.1,0.0,0.3,0.0,0.3,0.6,3.3 -B.J. Johnson,SF,23,ATL,6,0,7.2,1.3,2.7,.500,0.5,1.0,.500,0.8,1.7,.500,.594,0.3,0.3,1.000,0.3,1.0,1.3,0.0,0.3,0.0,0.3,0.5,3.5 -B.J. Johnson,SF,23,SAC,1,0,6.0,1.0,2.0,.500,0.0,1.0,.000,1.0,1.0,1.000,.500,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0 -James Johnson,PF,31,MIA,55,33,21.2,3.0,6.9,.433,0.9,2.7,.336,2.1,4.2,.496,.499,0.9,1.3,.714,0.4,2.8,3.2,2.5,0.6,0.5,1.3,2.1,7.8 -Stanley Johnson,SF,22,TOT,66,7,18.3,2.6,6.7,.389,0.9,3.3,.288,1.7,3.4,.484,.459,0.8,1.0,.781,0.5,2.8,3.3,1.3,0.9,0.2,1.2,1.7,6.9 -Stanley Johnson,SF,22,DET,48,7,20.0,2.8,7.3,.381,1.1,3.8,.282,1.7,3.5,.488,.454,0.9,1.1,.804,0.5,3.1,3.6,1.3,1.0,0.3,1.1,1.9,7.5 -Stanley Johnson,SF,22,NOP,18,0,13.7,2.1,5.1,.418,0.6,1.9,.324,1.5,3.2,.474,.478,0.5,0.7,.692,0.5,1.8,2.3,1.6,0.7,0.1,1.4,1.2,5.3 -Tyler Johnson,PG,26,TOT,57,22,26.8,3.8,9.2,.413,1.6,4.6,.346,2.2,4.7,.477,.498,1.7,2.2,.748,0.6,2.4,3.0,2.9,0.9,0.5,1.4,1.7,10.9 -Tyler Johnson,PG,26,MIA,44,10,25.5,3.9,9.1,.426,1.6,4.6,.353,2.3,4.5,.503,.516,1.4,2.0,.693,0.4,2.3,2.8,2.5,0.9,0.5,1.4,1.6,10.8 -Tyler Johnson,PG,26,PHO,13,12,31.2,3.5,9.6,.368,1.4,4.3,.321,2.2,5.3,.406,.440,2.6,3.0,.872,1.2,2.8,4.0,4.2,1.1,0.5,1.1,1.9,11.1 -Wesley Johnson,SF,31,TOT,38,13,14.1,1.2,3.4,.352,0.7,2.0,.329,0.5,1.4,.385,.449,0.3,0.5,.684,0.3,1.6,1.9,0.6,0.4,0.3,0.5,1.7,3.4 -Wesley Johnson,SF,31,NOP,26,13,14.5,1.3,3.4,.398,0.7,1.9,.380,0.6,1.5,.421,.506,0.2,0.3,.667,0.4,1.7,2.1,0.6,0.5,0.3,0.5,1.7,3.7 -Wesley 
Johnson,SF,31,WAS,12,0,13.1,0.8,3.3,.250,0.5,2.2,.231,0.3,1.2,.286,.325,0.6,0.8,.700,0.2,1.3,1.5,0.6,0.2,0.4,0.5,1.5,2.8 -Nikola Jokić,C,23,DEN,80,80,31.3,7.7,15.1,.511,1.0,3.4,.307,6.7,11.7,.569,.545,3.6,4.4,.821,2.9,8.0,10.8,7.3,1.4,0.7,3.1,2.9,20.1 -Damian Jones,C,23,GSW,24,22,17.1,2.2,3.1,.716,0.0,0.0,0,2.2,3.1,.716,.716,1.0,1.5,.649,1.3,1.8,3.1,1.2,0.5,1.0,0.7,2.6,5.4 -Derrick Jones Jr.,SF,21,MIA,60,14,19.2,2.7,5.4,.494,0.5,1.5,.308,2.2,3.9,.567,.537,1.2,2.0,.607,1.6,2.4,4.0,0.6,0.8,0.7,0.7,2.1,7.0 -Jalen Jones,SF,25,CLE,16,0,13.4,1.6,3.9,.419,0.6,1.8,.357,1.0,2.1,.471,.500,1.2,1.7,.704,0.7,1.4,2.1,0.4,0.6,0.1,0.5,1.7,5.1 -Jemerrio Jones,SF,23,LAL,6,2,23.8,2.0,5.5,.364,0.3,1.7,.200,1.7,3.8,.435,.394,0.2,0.3,.500,2.7,5.5,8.2,2.2,1.2,0.8,0.8,1.7,4.5 -Terrence Jones,PF,27,HOU,2,0,2.5,0.5,2.0,.250,0.0,1.0,.000,0.5,1.0,.500,.250,0.0,0.5,.000,0.0,2.0,2.0,0.0,0.0,0.0,0.5,0.5,1.0 -Tyus Jones,PG,22,MIN,68,23,22.9,2.7,6.6,.415,0.6,1.9,.317,2.1,4.7,.453,.460,0.9,1.0,.841,0.3,1.6,2.0,4.8,1.2,0.1,0.7,1.1,6.9 -DeAndre Jordan,C,30,TOT,69,69,29.7,4.1,6.5,.641,0.0,0.0,0,4.1,6.5,.641,.641,2.7,3.8,.705,3.3,9.8,13.1,2.3,0.6,1.1,2.2,2.4,11.0 -DeAndre Jordan,C,30,DAL,50,50,31.1,4.2,6.5,.644,0.0,0.0,0,4.2,6.5,.644,.644,2.7,4.0,.682,3.2,10.5,13.7,2.0,0.7,1.1,2.2,2.5,11.0 -DeAndre Jordan,C,30,NYK,19,19,25.9,4.1,6.5,.634,0.0,0.0,0,4.1,6.5,.634,.634,2.7,3.5,.773,3.4,8.0,11.4,3.0,0.5,1.1,2.2,2.2,10.9 -Cory Joseph,PG,27,IND,82,9,25.2,2.8,6.7,.412,0.7,2.1,.322,2.1,4.6,.454,.463,0.4,0.5,.698,0.5,2.9,3.4,3.9,1.1,0.3,1.0,1.6,6.5 -Frank Kaminsky,C,25,CHO,47,0,16.1,2.9,6.3,.463,1.1,3.0,.360,1.9,3.4,.553,.547,1.7,2.3,.738,0.8,2.6,3.5,1.3,0.3,0.3,0.9,1.4,8.6 -Luke Kennard,SG,22,DET,63,10,22.8,3.6,8.3,.438,1.7,4.3,.394,1.9,4.0,.486,.540,0.8,1.0,.836,0.2,2.7,2.9,1.8,0.4,0.2,0.9,1.5,9.7 -Michael Kidd-Gilchrist,PF,25,CHO,64,3,18.4,2.5,5.2,.476,0.3,0.7,.340,2.2,4.5,.498,.500,1.5,1.9,.772,1.4,2.5,3.8,1.0,0.5,0.6,0.7,2.4,6.7 -George 
King,SF,25,PHO,1,0,6.0,0.0,0.0,0,0.0,0.0,0,0.0,0.0,0,0,0.0,0.0,0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 -Maxi Kleber,PF,27,DAL,71,18,21.2,2.5,5.4,.453,1.1,3.1,.353,1.4,2.4,.583,.553,0.8,1.0,.784,1.3,3.4,4.6,1.0,0.5,1.1,0.8,2.0,6.8 -Brandon Knight,PG,27,TOT,39,26,18.9,2.5,6.7,.381,1.1,3.3,.318,1.5,3.4,.443,.460,0.7,0.9,.794,0.3,1.3,1.5,1.8,0.5,0.1,0.8,1.6,6.8 -Brandon Knight,PG,27,HOU,12,0,9.8,0.9,3.9,.234,0.4,2.7,.156,0.5,1.3,.400,.287,0.8,0.9,.818,0.1,0.7,0.8,0.8,0.2,0.0,0.4,1.2,3.0 -Brandon Knight,PG,27,CLE,27,26,22.9,3.3,7.9,.413,1.3,3.6,.371,1.9,4.3,.448,.498,0.7,0.9,.783,0.3,1.5,1.9,2.3,0.7,0.1,0.9,1.8,8.5 -Kevin Knox,PF,19,NYK,75,57,28.8,4.5,12.2,.370,1.7,4.9,.343,2.8,7.3,.387,.438,2.2,3.0,.717,0.8,3.7,4.5,1.1,0.6,0.3,1.5,2.3,12.8 -Furkan Korkmaz,SG,21,PHI,48,7,14.1,2.0,5.1,.400,1.0,3.0,.326,1.1,2.1,.505,.496,0.8,0.9,.818,0.3,1.9,2.2,1.1,0.6,0.0,0.5,1.3,5.8 -Luke Kornet,C,23,NYK,46,18,17.0,2.3,6.2,.378,1.5,4.2,.363,0.8,2.0,.411,.502,0.8,1.0,.826,0.6,2.3,2.9,1.2,0.6,0.9,0.5,0.9,7.0 -Kyle Korver,PF,37,CLE,16,0,15.7,2.2,4.8,.461,1.6,3.4,.463,0.6,1.4,.455,.625,0.8,1.0,.813,0.0,1.8,1.8,1.1,0.2,0.1,1.1,1.4,6.8 -Kyle Korver,SG,37,UTA,54,0,20.1,3.1,7.5,.408,2.1,5.4,.384,1.0,2.1,.469,.547,0.9,1.1,.825,0.2,2.3,2.5,1.2,0.4,0.2,0.8,1.6,9.1 -Kosta Koufos,C,29,SAC,42,1,12.0,1.7,3.6,.477,0.0,0.0,0,1.7,3.6,.477,.477,0.2,0.6,.417,1.2,3.0,4.2,0.9,0.4,0.4,0.6,1.6,3.7 -Rodions Kurucs,SF,20,BRK,63,46,20.5,3.2,7.1,.450,0.9,2.9,.315,2.3,4.2,.543,.514,1.1,1.5,.783,0.9,3.0,3.9,0.8,0.7,0.4,1.2,2.3,8.5 -Kyle Kuzma,PF,23,LAL,70,68,33.1,7.1,15.5,.456,1.8,6.0,.303,5.3,9.5,.553,.515,2.7,3.6,.752,0.9,4.6,5.5,2.5,0.6,0.4,1.9,2.4,18.7 -Skal Labissière,PF,22,TOT,22,1,8.0,1.2,2.2,.531,0.3,0.6,.462,0.9,1.6,.556,.592,0.4,0.8,.529,0.4,1.5,2.0,0.5,0.2,0.3,0.5,1.3,3.0 -Skal Labissière,PF,22,SAC,13,0,8.7,1.0,2.3,.433,0.3,0.8,.364,0.7,1.5,.474,.500,0.5,0.8,.545,0.3,1.5,1.8,0.5,0.2,0.2,0.5,1.6,2.8 -Skal 
Labissière,PF,22,POR,9,1,7.0,1.4,2.1,.684,0.2,0.2,1.000,1.2,1.9,.647,.737,0.3,0.7,.500,0.6,1.6,2.1,0.6,0.3,0.3,0.4,0.9,3.4 -Jeremy Lamb,SG,26,CHO,79,55,28.5,5.5,12.4,.440,1.5,4.2,.348,4.0,8.2,.487,.499,2.9,3.3,.888,0.8,4.7,5.5,2.2,1.1,0.4,1.0,1.8,15.3 -Zach LaVine,SG,23,CHI,63,62,34.5,8.4,18.0,.467,1.9,5.1,.374,6.5,12.9,.504,.520,5.0,6.0,.832,0.6,4.0,4.7,4.5,1.0,0.4,3.4,2.2,23.7 -Jake Layman,SF,24,POR,71,33,18.7,3.0,6.0,.509,0.8,2.5,.326,2.2,3.4,.646,.579,0.7,1.0,.704,0.8,2.3,3.1,0.7,0.4,0.4,0.6,1.6,7.6 -T.J. Leaf,PF,21,IND,58,1,9.0,1.7,3.2,.541,0.1,0.5,.258,1.6,2.6,.599,.563,0.3,0.5,.613,0.7,1.4,2.2,0.4,0.2,0.3,0.2,0.6,3.9 -Courtney Lee,SG,33,TOT,34,6,12.6,1.6,3.8,.411,0.5,1.6,.291,1.1,2.2,.500,.473,0.4,0.6,.667,0.3,1.3,1.6,1.1,0.6,0.1,0.4,0.9,4.0 -Courtney Lee,SG,33,NYK,12,2,13.3,1.8,3.9,.447,0.4,1.3,.313,1.3,2.6,.516,.500,0.8,1.2,.643,0.5,1.8,2.3,1.3,0.7,0.2,0.4,1.1,4.7 -Courtney Lee,SG,33,DAL,22,4,12.2,1.5,3.7,.390,0.5,1.8,.282,1.0,2.0,.488,.457,0.2,0.3,.714,0.1,1.1,1.2,1.0,0.6,0.0,0.4,0.9,3.6 -Damion Lee,SG,26,GSW,32,0,11.7,1.8,4.0,.441,0.8,2.1,.397,0.9,1.8,.492,.547,0.6,0.7,.864,0.3,1.8,2.0,0.4,0.4,0.0,0.3,0.9,4.9 -Walt Lemon Jr.,PG,26,CHI,6,3,27.8,6.3,14.5,.437,0.3,0.8,.400,6.0,13.7,.439,.448,1.3,1.8,.727,0.7,3.8,4.5,5.0,1.8,0.2,1.7,2.3,14.3 -Alex Len,C,25,ATL,77,31,20.1,4.2,8.4,.494,1.0,2.6,.363,3.2,5.8,.554,.551,1.8,2.8,.648,2.1,3.5,5.5,1.1,0.4,0.9,1.3,2.6,11.1 -Kawhi Leonard,SF,27,TOR,60,60,34.0,9.3,18.8,.496,1.9,5.0,.371,7.5,13.8,.542,.546,6.1,7.1,.854,1.3,6.0,7.3,3.3,1.8,0.4,2.0,1.5,26.6 -Meyers Leonard,C,26,POR,61,2,14.4,2.2,4.0,.545,0.8,1.8,.450,1.3,2.1,.626,.649,0.7,0.8,.843,0.8,3.0,3.8,1.2,0.2,0.1,0.7,1.7,5.9 -Jon Leuer,PF,29,DET,41,1,9.8,1.6,2.8,.584,0.0,0.3,.091,1.6,2.5,.637,.588,0.6,0.8,.742,0.7,1.7,2.4,0.3,0.3,0.1,0.6,1.5,3.8 -Caris LeVert,SF,24,BRK,40,25,26.6,5.2,12.1,.429,1.2,3.9,.312,4.0,8.2,.483,.478,2.1,3.1,.691,0.9,2.9,3.8,3.9,1.1,0.4,1.7,1.9,13.7 -Damian 
Lillard,PG,28,POR,80,80,35.5,8.5,19.2,.444,3.0,8.0,.369,5.6,11.1,.499,.522,5.9,6.4,.912,0.9,3.8,4.6,6.9,1.1,0.4,2.7,1.9,25.8 -Jeremy Lin,PG,30,TOT,74,4,19.4,3.2,7.3,.440,0.7,2.5,.294,2.5,4.8,.517,.491,2.4,2.8,.838,0.3,2.1,2.4,3.1,0.6,0.2,1.7,1.9,9.6 -Jeremy Lin,PG,30,ATL,51,1,19.7,3.5,7.6,.466,0.9,2.6,.333,2.7,5.0,.535,.523,2.8,3.3,.845,0.3,2.0,2.3,3.5,0.7,0.1,1.9,1.9,10.7 -Jeremy Lin,PG,30,TOR,23,3,18.8,2.5,6.7,.374,0.5,2.4,.200,2.0,4.3,.470,.410,1.5,1.8,.810,0.3,2.3,2.6,2.2,0.4,0.3,1.1,2.1,7.0 -Shaun Livingston,PG,33,GSW,64,0,15.1,1.7,3.3,.519,0.0,0.0,.000,1.7,3.3,.524,.519,0.6,0.8,.784,0.7,1.2,1.8,1.8,0.5,0.4,0.6,1.2,4.0 -Zach Lofton,SG,26,DET,1,0,4.0,0.0,1.0,.000,0.0,1.0,.000,0.0,0.0,0,.000,0.0,0.0,0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0 -Kevon Looney,C,22,GSW,80,24,18.5,2.7,4.3,.625,0.0,0.1,.100,2.7,4.2,.641,.627,0.8,1.3,.619,2.4,2.8,5.2,1.5,0.6,0.7,0.6,2.6,6.3 -Brook Lopez,C,30,MIL,81,81,28.7,4.4,9.7,.452,2.3,6.3,.365,2.1,3.4,.613,.571,1.4,1.6,.842,0.4,4.5,4.9,1.2,0.6,2.2,1.0,2.3,12.5 -Robin Lopez,C,30,CHI,74,36,21.7,4.1,7.2,.568,0.1,0.4,.226,4.0,6.8,.589,.575,1.2,1.7,.724,1.9,2.0,3.9,1.2,0.1,1.1,1.3,1.7,9.5 -Kevin Love,PF,30,CLE,22,21,27.2,5.0,12.9,.385,2.4,6.7,.361,2.5,6.2,.412,.479,4.7,5.2,.904,1.5,9.4,10.9,2.2,0.3,0.2,1.9,2.5,17.0 -Kyle Lowry,PG,32,TOR,65,65,34.0,4.7,11.4,.411,2.4,7.0,.347,2.3,4.4,.514,.518,2.5,3.0,.830,0.6,4.2,4.8,8.7,1.4,0.5,2.8,2.6,14.2 -Jordan Loyd,PG,25,TOR,12,0,4.6,0.7,1.5,.444,0.3,0.7,.500,0.3,0.8,.400,.556,0.8,0.9,.818,0.1,0.7,0.8,0.5,0.0,0.0,0.2,0.4,2.4 -Kalin Lucas,PG,29,DET,1,0,6.0,0.0,1.0,.000,0.0,1.0,.000,0.0,0.0,0,.000,2.0,2.0,1.000,0.0,3.0,3.0,1.0,0.0,0.0,0.0,1.0,2.0 -Timothé Luwawu-Cabarrot,SF,23,TOT,50,7,13.4,1.6,4.4,.376,0.7,2.3,.310,0.9,2.0,.451,.459,0.6,0.8,.756,0.2,1.7,1.9,0.5,0.4,0.2,0.4,1.5,4.6 -Timothé Luwawu-Cabarrot,SF,23,OKC,21,1,5.9,0.6,2.0,.302,0.2,1.0,.227,0.4,1.0,.381,.360,0.2,0.3,.667,0.2,0.7,0.9,0.2,0.2,0.0,0.2,0.7,1.7 -Timothé 
Luwawu-Cabarrot,SF,23,CHI,29,6,18.8,2.4,6.0,.394,1.1,3.2,.330,1.3,2.8,.469,.483,0.9,1.2,.771,0.2,2.5,2.7,0.8,0.5,0.2,0.6,2.0,6.8 -Tyler Lydon,PF,22,DEN,25,0,3.8,0.4,0.7,.500,0.2,0.4,.400,0.2,0.3,.625,.611,0.0,0.1,.333,0.3,0.4,0.7,0.2,0.1,0.0,0.2,0.4,0.9 -Trey Lyles,PF,23,DEN,64,2,17.5,3.2,7.7,.418,0.8,3.1,.255,2.4,4.6,.529,.470,1.3,1.8,.698,0.7,3.2,3.8,1.4,0.5,0.4,1.1,1.5,8.5 -Scott Machado,PG,28,LAL,4,0,4.8,1.0,1.5,.667,0.3,0.3,1.000,0.8,1.3,.600,.750,0.3,0.3,1.000,0.0,0.0,0.0,0.8,0.3,0.0,0.0,0.5,2.5 -Shelvin Mack,PG,28,TOT,57,3,21.9,2.9,7.1,.404,0.8,2.3,.354,2.1,4.8,.429,.462,1.0,1.5,.690,0.3,1.5,1.8,3.2,0.8,0.1,1.2,1.5,7.5 -Shelvin Mack,PG,28,MEM,53,3,22.7,3.0,7.3,.414,0.9,2.4,.359,2.2,4.9,.441,.473,1.0,1.4,.707,0.3,1.6,1.9,3.4,0.8,0.1,1.2,1.5,7.9 -Shelvin Mack,PG,28,CHO,4,0,10.5,0.5,3.5,.143,0.0,0.5,.000,0.5,3.0,.167,.143,1.3,2.3,.556,0.0,0.5,0.5,0.3,0.5,0.0,1.0,0.8,2.3 -Daryl Macon,SG,23,DAL,8,0,11.3,1.3,3.4,.370,0.6,1.4,.455,0.6,2.0,.313,.463,0.5,0.9,.571,0.3,1.3,1.5,0.9,0.1,0.0,0.8,1.0,3.6 -J.P. 
Macura,SG,23,CHO,2,0,8.5,1.5,4.5,.333,0.0,2.0,.000,1.5,2.5,.600,.333,0.0,0.0,0,0.0,1.5,1.5,1.0,0.0,0.0,0.0,0.0,3.0 -Ian Mahinmi,C,32,WAS,34,6,14.6,1.4,3.1,.452,0.1,0.5,.188,1.3,2.6,.500,.466,1.2,1.8,.689,1.4,2.4,3.8,0.7,0.7,0.5,0.6,2.5,4.1 -Thon Maker,C,21,MIL,35,0,11.7,1.7,3.8,.440,0.7,2.2,.333,0.9,1.6,.589,.537,0.6,1.1,.541,0.5,2.3,2.7,0.5,0.3,0.5,0.3,1.6,4.7 -Thon Maker,PF,21,DET,29,5,19.4,1.7,4.6,.373,0.8,2.6,.307,0.9,2.0,.458,.459,1.2,1.6,.766,0.8,2.8,3.7,0.9,0.4,1.1,0.8,1.7,5.5 -Boban Marjanović,C,30,TOT,58,12,11.7,2.8,4.5,.615,0.1,0.2,.400,2.7,4.3,.624,.623,1.7,2.3,.748,1.5,3.1,4.6,0.9,0.3,0.5,1.0,1.6,7.3 -Boban Marjanović,C,30,LAC,36,9,10.4,2.4,3.9,.607,0.0,0.1,.000,2.4,3.8,.616,.607,2.0,2.6,.758,1.5,2.7,4.2,0.6,0.3,0.5,1.0,1.6,6.7 -Boban Marjanović,C,30,PHI,22,3,13.9,3.4,5.5,.625,0.2,0.4,.500,3.2,5.1,.634,.642,1.2,1.6,.722,1.5,3.7,5.1,1.5,0.2,0.5,1.0,1.5,8.2 -Lauri Markkanen,PF,21,CHI,52,51,32.3,6.6,15.3,.430,2.3,6.4,.361,4.3,8.9,.479,.506,3.3,3.8,.872,1.4,7.6,9.0,1.4,0.7,0.6,1.6,2.3,18.7 -Jarell Martin,PF,24,ORL,42,1,7.8,1.0,2.5,.413,0.5,1.4,.351,0.5,1.1,.489,.510,0.2,0.3,.818,0.3,1.5,1.7,0.4,0.1,0.2,0.3,1.2,2.7 -Frank Mason III,PG,24,SAC,38,0,11.4,1.9,4.4,.420,0.4,1.7,.219,1.5,2.8,.543,.462,1.0,1.5,.684,0.2,1.0,1.1,2.2,0.4,0.1,0.9,0.9,5.1 -Yante Maten,PF,22,MIA,2,0,6.5,0.5,2.0,.250,0.0,0.5,.000,0.5,1.5,.333,.250,0.0,0.0,0,0.5,1.0,1.5,0.0,0.5,0.0,0.0,0.0,1.0 -Wesley Matthews,SF,32,DAL,44,44,29.8,4.4,10.7,.414,2.3,6.0,.380,2.1,4.7,.456,.520,2.0,2.5,.791,0.5,1.9,2.3,2.3,0.8,0.3,1.3,2.3,13.1 -Wesley Matthews,SG,32,NYK,2,1,27.0,2.0,9.5,.211,1.0,5.0,.200,1.0,4.5,.222,.263,2.0,2.5,.800,0.5,1.0,1.5,2.5,0.5,0.5,1.0,1.0,7.0 -Wesley Matthews,SG,32,IND,23,23,31.5,3.5,9.1,.386,2.1,5.7,.369,1.4,3.5,.413,.500,1.8,2.1,.854,0.5,2.3,2.8,2.4,0.9,0.2,1.3,2.4,10.9 -Luc Mbah a Moute,PF,32,LAC,4,0,15.3,2.0,4.5,.444,0.5,1.5,.333,1.5,3.0,.500,.500,0.5,1.3,.400,0.5,1.3,1.8,0.5,0.3,0.3,0.5,2.0,5.0 -Tahjere 
McCall,SG,24,BRK,1,0,8.0,2.0,3.0,.667,0.0,1.0,.000,2.0,2.0,1.000,.667,0.0,1.0,.000,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,4.0 -Patrick McCaw,SG,23,TOT,29,1,13.7,0.9,2.2,.413,0.3,1.0,.321,0.6,1.2,.486,.484,0.4,0.5,.867,0.2,1.4,1.7,1.0,0.8,0.1,0.6,1.3,2.6 -Patrick McCaw,SG,23,CLE,3,0,17.7,0.7,3.0,.222,0.3,1.3,.250,0.3,1.7,.200,.278,0.0,0.0,0,0.0,1.0,1.0,0.7,0.7,0.0,1.0,0.3,1.7 -Patrick McCaw,SG,23,TOR,26,1,13.2,0.9,2.1,.444,0.3,0.9,.333,0.6,1.2,.533,.519,0.5,0.6,.867,0.3,1.5,1.7,1.0,0.8,0.1,0.5,1.4,2.7 -CJ McCollum,SG,27,POR,70,70,33.9,8.2,17.8,.459,2.4,6.4,.375,5.8,11.4,.506,.527,2.3,2.7,.828,0.9,3.1,4.0,3.0,0.8,0.4,1.5,2.5,21.0 -T.J. McConnell,PG,26,PHI,76,3,19.3,2.9,5.5,.525,0.2,0.6,.333,2.7,5.0,.546,.542,0.4,0.5,.784,0.4,1.9,2.3,3.4,1.0,0.2,1.2,1.4,6.4 -Doug McDermott,SF,27,IND,77,1,17.4,2.7,5.5,.491,1.1,2.7,.408,1.6,2.8,.569,.590,0.9,1.0,.835,0.2,1.2,1.4,0.9,0.2,0.1,0.5,1.4,7.3 -JaVale McGee,C,31,LAL,75,62,22.3,5.3,8.5,.624,0.0,0.2,.083,5.3,8.4,.634,.625,1.3,2.0,.632,2.6,4.9,7.5,0.7,0.6,2.0,1.4,2.8,12.0 -Rodney McGruder,SG,27,MIA,66,45,23.5,2.8,7.0,.403,1.2,3.4,.351,1.6,3.6,.453,.489,0.8,1.1,.722,0.9,2.7,3.6,1.7,0.5,0.2,1.0,1.7,7.6 -Alfonzo McKinnie,SF,26,GSW,72,5,13.9,1.9,3.8,.487,0.6,1.6,.356,1.3,2.2,.586,.564,0.4,0.7,.563,1.1,2.3,3.4,0.4,0.3,0.2,0.4,1.9,4.7 -Ben McLemore,SG,25,SAC,19,0,8.3,1.3,3.4,.391,0.9,2.2,.415,0.4,1.2,.348,.523,0.4,0.6,.667,0.2,0.7,0.9,0.2,0.3,0.2,0.3,1.2,3.9 -Jordan McRae,SG,27,WAS,27,0,12.3,2.3,4.8,.469,0.4,1.3,.286,1.9,3.5,.537,.508,1.0,1.3,.800,0.2,1.3,1.5,1.1,0.5,0.3,0.6,1.0,5.9 -Jodie Meeks,SG,31,TOR,8,0,13.0,2.6,4.9,.538,1.0,2.3,.444,1.6,2.6,.619,.641,0.1,0.1,1.000,0.1,1.4,1.5,1.0,0.1,0.1,0.3,0.6,6.4 -Salah Mejri,C,32,DAL,36,4,11.1,1.5,3.1,.491,0.3,0.9,.324,1.2,2.2,.564,.540,0.6,0.9,.625,1.0,2.6,3.6,1.0,0.3,0.7,0.6,1.5,3.9 -De'Anthony Melton,PG,20,PHO,50,31,19.7,2.0,5.1,.391,0.6,1.9,.305,1.4,3.2,.441,.447,0.4,0.6,.750,0.5,2.2,2.7,3.2,1.4,0.5,1.5,2.3,5.0 -Chimezie 
Metu,PF,21,SAS,29,0,5.0,0.7,2.0,.328,0.0,0.1,.000,0.7,1.9,.339,.328,0.4,0.6,.765,0.3,0.9,1.2,0.4,0.2,0.1,0.5,0.5,1.8 -Khris Middleton,SF,27,MIL,77,77,31.1,6.6,14.9,.441,2.3,6.2,.378,4.2,8.8,.485,.519,2.8,3.4,.837,0.6,5.3,6.0,4.3,1.0,0.1,2.3,2.2,18.3 -C.J. Miles,SF,31,TOT,53,1,16.2,2.1,5.9,.360,1.3,3.8,.330,0.8,2.0,.417,.468,0.9,1.1,.828,0.2,1.5,1.8,0.7,0.5,0.3,0.6,1.5,6.4 -C.J. Miles,SF,31,TOR,40,1,14.1,1.8,5.2,.340,1.1,3.4,.314,0.7,1.7,.391,.444,0.9,1.1,.795,0.3,1.4,1.7,0.6,0.5,0.3,0.5,1.6,5.5 -C.J. Miles,SF,31,MEM,13,0,22.6,3.2,8.1,.400,1.8,5.1,.364,1.4,3.0,.462,.514,1.0,1.1,.929,0.1,2.0,2.1,1.1,0.6,0.4,0.7,1.3,9.3 -Darius Miller,SF,28,NOP,69,15,25.5,2.7,7.0,.390,1.9,5.3,.365,0.8,1.7,.467,.528,0.8,1.0,.789,0.2,1.7,1.9,2.1,0.6,0.3,0.9,2.4,8.2 -Malcolm Miller,SF,25,TOR,10,0,6.7,1.1,2.6,.423,1.0,2.1,.476,0.1,0.5,.200,.615,0.3,0.4,.750,0.1,0.4,0.5,0.1,0.1,0.1,0.1,0.5,3.5 -Patty Mills,PG,30,SAS,82,1,23.3,3.4,8.1,.425,1.9,4.9,.394,1.5,3.2,.475,.545,1.1,1.3,.854,0.3,1.9,2.2,3.0,0.6,0.1,1.1,1.6,9.9 -Paul Millsap,PF,33,DEN,70,65,27.1,4.6,9.5,.484,0.8,2.3,.365,3.8,7.2,.522,.528,2.6,3.6,.727,2.2,5.0,7.2,2.0,1.2,0.8,1.4,2.6,12.6 -Shake Milton,SG,22,PHI,20,0,13.4,1.7,4.4,.391,0.7,2.2,.318,1.0,2.2,.465,.471,0.3,0.4,.714,0.5,1.3,1.8,0.9,0.4,0.4,0.3,1.5,4.4 -Nikola Mirotić,PF,27,TOT,46,25,27.1,5.2,11.8,.439,2.5,6.9,.365,2.7,4.9,.545,.546,2.3,2.7,.847,1.3,6.1,7.4,1.2,0.7,0.7,1.0,2.3,15.2 -Nikola Mirotić,PF,27,NOP,32,22,28.9,5.7,12.7,.447,2.7,7.2,.368,3.0,5.5,.551,.552,2.7,3.2,.842,1.4,6.8,8.3,1.1,0.7,0.8,1.2,2.7,16.7 -Nikola Mirotić,PF,27,MIL,14,3,22.9,4.0,9.6,.415,2.2,6.2,.356,1.8,3.4,.521,.530,1.4,1.6,.870,1.0,4.4,5.4,1.4,0.7,0.6,0.8,1.5,11.6 -Donovan Mitchell,SG,22,UTA,77,77,33.7,8.6,19.9,.432,2.4,6.7,.362,6.1,13.1,.468,.493,4.1,5.1,.806,0.8,3.3,4.1,4.2,1.4,0.4,2.8,2.7,23.8 -Naz Mitrou-Long,SG,25,UTA,14,0,6.0,0.4,1.4,.300,0.1,0.8,.182,0.3,0.6,.444,.350,0.1,0.1,1.000,0.1,0.4,0.4,1.1,0.1,0.1,0.6,0.7,1.1 -Malik 
Monk,SG,20,CHO,73,0,17.2,3.1,8.0,.387,1.5,4.5,.330,1.6,3.5,.461,.480,1.2,1.4,.882,0.2,1.7,1.9,1.6,0.5,0.3,1.2,1.5,8.9 -Greg Monroe,C,28,TOT,43,2,11.2,2.1,4.4,.487,0.0,0.1,.200,2.1,4.3,.495,.489,1.0,1.7,.625,1.6,2.4,4.0,0.6,0.3,0.2,0.7,1.5,5.3 -Greg Monroe,C,28,TOR,38,2,11.1,1.9,4.2,.460,0.0,0.1,.000,1.9,4.1,.471,.460,0.9,1.6,.574,1.6,2.5,4.1,0.4,0.3,0.2,0.8,1.6,4.8 -Greg Monroe,C,28,BOS,2,0,2.5,1.5,2.5,.600,0.0,0.0,0,1.5,2.5,.600,.600,0.0,0.0,0,1.5,0.0,1.5,0.5,0.0,0.0,0.0,0.0,3.0 -Greg Monroe,C,28,PHI,3,0,17.3,5.0,7.7,.652,0.3,0.3,1.000,4.7,7.3,.636,.674,3.3,3.7,.909,1.3,3.0,4.3,2.3,0.3,0.0,0.3,1.7,13.7 -E'Twaun Moore,SG,29,NOP,53,36,27.6,4.8,10.0,.481,1.4,3.3,.432,3.4,6.7,.506,.553,0.8,1.1,.763,0.7,1.7,2.4,1.9,0.8,0.2,1.1,2.1,11.9 -Eric Moreland,PF,27,TOT,5,0,8.6,0.6,1.4,.429,0.2,0.2,1.000,0.4,1.2,.333,.500,0.0,0.0,0,0.8,3.2,4.0,0.8,0.2,0.2,1.0,1.8,1.4 -Eric Moreland,PF,27,PHO,1,0,5.0,0.0,0.0,0,0.0,0.0,0,0.0,0.0,0,0,0.0,0.0,0,0.0,3.0,3.0,0.0,0.0,0.0,0.0,1.0,0.0 -Eric Moreland,PF,27,TOR,4,0,9.5,0.8,1.8,.429,0.3,0.3,1.000,0.5,1.5,.333,.500,0.0,0.0,0,1.0,3.3,4.3,1.0,0.3,0.3,1.3,2.0,1.8 -Jaylen Morris,SG,23,MIL,4,0,7.3,1.0,2.5,.400,0.3,0.8,.333,0.8,1.8,.429,.450,0.3,0.5,.500,0.0,1.3,1.3,1.0,0.5,0.0,0.3,0.3,2.5 -Marcus Morris,PF,29,BOS,75,53,27.9,5.0,11.3,.447,1.9,5.2,.375,3.1,6.1,.508,.533,1.9,2.3,.844,1.0,5.1,6.1,1.5,0.6,0.3,1.2,2.4,13.9 -Markieff Morris,PF,29,TOT,58,16,21.9,3.5,8.3,.421,1.2,3.5,.335,2.3,4.8,.484,.492,1.2,1.6,.772,1.1,3.5,4.6,1.4,0.6,0.4,0.9,3.0,9.4 -Markieff Morris,PF,29,WAS,34,15,26.0,4.2,9.6,.436,1.4,4.2,.333,2.8,5.4,.516,.509,1.7,2.1,.781,1.2,3.9,5.1,1.8,0.7,0.6,1.4,3.4,11.5 -Markieff Morris,PF,29,OKC,24,1,16.1,2.5,6.5,.391,0.8,2.5,.339,1.7,4.0,.423,.455,0.6,0.8,.737,0.8,3.0,3.8,0.8,0.5,0.1,0.3,2.4,6.5 -Monte Morris,PG,23,DEN,82,6,24.0,4.2,8.6,.493,1.1,2.8,.414,3.1,5.8,.531,.560,0.8,1.0,.802,0.4,1.9,2.4,3.6,0.9,0.0,0.6,1.2,10.4 -Donatas 
Motiejūnas,PF,28,SAS,3,0,4.3,1.0,2.0,.500,0.0,0.0,0,1.0,2.0,.500,.500,0.0,0.7,.000,0.7,0.3,1.0,0.3,0.0,0.3,1.0,2.0,2.0 -Johnathan Motley,PF,23,LAC,22,0,7.1,1.8,3.3,.534,0.0,0.1,.000,1.8,3.2,.557,.534,1.1,1.8,.600,0.8,1.5,2.3,0.5,0.2,0.1,0.7,1.2,4.6 -Emmanuel Mudiay,PG,22,NYK,59,42,27.2,5.6,12.5,.446,1.2,3.6,.329,4.4,9.0,.492,.493,2.4,3.2,.774,0.6,2.8,3.3,3.9,0.7,0.3,2.4,1.7,14.8 -Jamal Murray,PG,21,DEN,75,74,32.6,6.8,15.6,.437,2.0,5.5,.367,4.8,10.1,.476,.502,2.5,3.0,.848,0.9,3.4,4.2,4.8,0.9,0.4,2.1,2.0,18.2 -Džanan Musa,SG,19,BRK,9,0,4.3,1.0,2.4,.409,0.1,1.1,.100,0.9,1.3,.667,.432,0.0,0.2,.000,0.1,0.4,0.6,0.2,0.2,0.0,0.4,0.1,2.1 -Mike Muscala,PF,27,TOT,64,10,20.4,2.3,5.6,.402,1.4,4.0,.348,0.9,1.6,.533,.525,1.1,1.3,.824,0.9,2.9,3.8,1.2,0.3,0.6,0.8,2.0,7.0 -Mike Muscala,PF,27,PHI,47,6,22.1,2.3,5.9,.392,1.4,4.2,.342,0.9,1.7,.519,.514,1.3,1.6,.818,1.1,3.2,4.3,1.3,0.4,0.6,0.9,2.4,7.4 -Mike Muscala,PF,27,LAL,17,4,15.6,2.1,4.9,.434,1.2,3.4,.368,0.9,1.5,.577,.560,0.4,0.5,.875,0.4,2.2,2.6,0.8,0.2,0.6,0.3,1.1,5.9 -Svi Mykhailiuk,SF,21,TOT,42,0,10.5,1.1,3.3,.329,0.7,2.1,.326,0.4,1.2,.333,.432,0.3,0.5,.600,0.2,0.6,0.9,0.9,0.3,0.0,0.5,0.6,3.2 -Svi Mykhailiuk,SF,21,LAL,39,0,10.8,1.1,3.4,.333,0.7,2.2,.318,0.4,1.2,.362,.436,0.3,0.5,.600,0.2,0.7,0.9,0.8,0.3,0.0,0.5,0.6,3.3 -Svi Mykhailiuk,SF,21,DET,3,0,6.7,0.7,2.7,.250,0.7,1.3,.500,0.0,1.3,.000,.375,0.0,0.0,0,0.3,0.3,0.7,1.3,0.3,0.0,0.3,0.0,2.0 -Abdel Nader,SF,25,OKC,61,1,11.4,1.5,3.5,.423,0.5,1.6,.320,1.0,1.9,.513,.498,0.4,0.6,.750,0.2,1.7,1.9,0.3,0.3,0.2,0.4,1.1,4.0 -Larry Nance Jr.,C,26,CLE,67,30,26.8,3.7,7.1,.520,0.5,1.5,.337,3.2,5.7,.567,.554,1.4,2.0,.716,2.5,5.7,8.2,3.2,1.5,0.6,1.4,2.9,9.4 -Shabazz Napier,PG,27,BRK,56,2,17.6,3.0,7.8,.389,1.4,4.1,.333,1.7,3.7,.449,.476,2.1,2.5,.833,0.3,1.5,1.8,2.6,0.7,0.3,1.2,1.2,9.4 -Raul Neto,PG,26,UTA,37,1,12.8,2.0,4.4,.460,0.5,1.6,.333,1.5,2.7,.535,.522,0.8,0.9,.848,0.2,1.5,1.7,2.5,0.4,0.1,0.9,1.3,5.3 -Georges 
Niang,PF,25,UTA,59,0,8.7,1.5,3.1,.475,0.7,1.8,.410,0.7,1.3,.566,.594,0.3,0.4,.833,0.2,1.3,1.5,0.6,0.2,0.1,0.4,1.0,4.0 -Joakim Noah,C,33,MEM,42,1,16.5,2.6,5.1,.516,0.0,0.0,.000,2.6,5.0,.519,.516,1.9,2.6,.716,1.4,4.3,5.7,2.1,0.5,0.7,1.2,2.3,7.1 -Nerlens Noel,C,24,OKC,77,2,13.7,2.1,3.6,.587,0.0,0.0,0,2.1,3.6,.587,.587,0.7,1.0,.684,1.6,2.6,4.2,0.6,0.9,1.2,0.6,2.2,4.9 -Dirk Nowitzki*,PF,40,DAL,51,20,15.6,2.6,7.4,.359,1.3,4.0,.312,1.4,3.4,.415,.444,0.8,1.0,.780,0.1,3.0,3.1,0.7,0.2,0.4,0.4,1.5,7.3 -Frank Ntilikina,PG,20,NYK,43,16,21.0,2.2,6.6,.337,0.8,2.7,.287,1.4,3.9,.371,.395,0.5,0.7,.767,0.3,1.7,2.0,2.8,0.7,0.3,1.3,2.4,5.7 -James Nunnally,SF,28,TOT,15,0,6.8,0.8,2.3,.353,0.5,1.7,.320,0.3,0.6,.444,.471,0.3,0.3,1.000,0.0,0.3,0.3,0.5,0.1,0.0,0.1,0.7,2.4 -James Nunnally,SF,28,MIN,13,0,4.9,0.7,1.6,.429,0.4,1.0,.385,0.3,0.6,.500,.548,0.3,0.3,1.000,0.0,0.3,0.3,0.4,0.1,0.0,0.1,0.3,2.1 -James Nunnally,SF,28,HOU,2,0,19.0,1.5,6.5,.231,1.5,6.0,.250,0.0,0.5,.000,.346,0.0,0.0,0,0.0,0.5,0.5,1.0,0.0,0.0,0.0,3.0,4.5 -Jusuf Nurkić,C,24,POR,72,72,27.4,5.8,11.5,.508,0.0,0.4,.103,5.8,11.1,.523,.510,3.9,5.1,.773,3.4,7.0,10.4,3.2,1.0,1.4,2.3,3.5,15.6 -David Nwaba,SF,26,CLE,51,14,19.3,2.5,5.1,.481,0.5,1.5,.320,2.0,3.7,.545,.527,1.1,1.7,.682,0.8,2.4,3.2,1.1,0.7,0.3,0.6,2.1,6.5 -Royce O'Neale,SF,25,UTA,82,16,20.4,2.0,4.2,.475,0.8,2.1,.386,1.2,2.0,.569,.574,0.4,0.5,.762,0.3,3.2,3.5,1.5,0.7,0.3,0.9,2.1,5.2 -Kyle O'Quinn,C,28,IND,45,3,8.2,1.5,3.0,.507,0.0,0.3,.083,1.5,2.8,.548,.511,0.4,0.5,.810,0.6,2.0,2.6,1.2,0.2,0.6,0.7,1.5,3.5 -Semi Ojeleye,PF,24,BOS,56,3,10.6,1.2,2.8,.424,0.5,1.6,.315,0.7,1.2,.565,.513,0.4,0.7,.615,0.4,1.1,1.5,0.4,0.2,0.1,0.3,0.8,3.3 -Jahlil Okafor,C,23,NOP,59,24,15.8,3.6,6.1,.586,0.0,0.1,.200,3.6,6.1,.591,.587,1.0,1.5,.663,1.4,3.3,4.7,0.7,0.3,0.7,0.9,1.6,8.2 -Elie Okobo,PG,21,PHO,53,16,18.1,2.2,5.5,.393,0.7,2.5,.295,1.4,3.0,.475,.460,0.7,0.9,.787,0.2,1.6,1.8,2.4,0.6,0.1,1.3,2.1,5.7 -Josh 
Okogie,SG,20,MIN,74,52,23.7,2.6,6.9,.386,0.8,2.9,.279,1.8,4.0,.464,.445,1.6,2.2,.728,0.6,2.4,2.9,1.2,1.2,0.4,0.9,2.2,7.7 -Victor Oladipo,SG,26,IND,36,36,31.9,6.9,16.3,.423,2.1,6.0,.343,4.9,10.3,.470,.486,2.9,3.9,.730,0.6,5.0,5.6,5.2,1.7,0.3,2.3,2.0,18.8 -Kelly Olynyk,PF,27,MIA,79,36,22.9,3.3,7.1,.463,1.4,4.0,.354,1.9,3.1,.604,.563,1.9,2.3,.822,0.9,3.8,4.7,1.8,0.7,0.5,1.4,2.3,10.0 -Cedi Osman,SF,23,CLE,76,75,32.2,4.7,11.1,.427,1.7,4.9,.348,3.0,6.2,.489,.504,1.9,2.4,.779,0.6,4.1,4.7,2.6,0.8,0.1,1.5,2.6,13.0 -Kelly Oubre Jr.,SF,23,TOT,69,19,28.0,5.4,12.2,.445,1.6,4.9,.320,3.9,7.3,.530,.510,2.7,3.5,.775,1.0,3.7,4.7,1.2,1.2,0.9,1.5,2.6,15.2 -Kelly Oubre Jr.,SF,23,WAS,29,7,26.0,4.6,10.5,.433,1.4,4.6,.311,3.1,6.0,.526,.500,2.3,2.9,.800,0.8,3.6,4.4,0.7,0.9,0.7,1.0,2.4,12.9 -Kelly Oubre Jr.,SF,23,PHO,40,12,29.5,6.1,13.4,.453,1.7,5.2,.325,4.4,8.3,.532,.515,3.0,4.0,.761,1.2,3.7,4.9,1.6,1.4,1.0,1.8,2.8,16.9 -Zaza Pachulia,C,34,DET,68,3,12.9,1.3,2.8,.440,0.0,0.1,.000,1.3,2.8,.450,.440,1.4,1.8,.782,1.5,2.4,3.9,1.3,0.5,0.3,0.8,2.2,3.9 -Jabari Parker,PF,23,TOT,64,17,26.9,5.8,11.7,.493,1.0,3.0,.313,4.8,8.7,.556,.533,2.0,2.9,.712,1.2,5.3,6.6,2.4,0.7,0.5,2.4,2.3,14.5 -Jabari Parker,PF,23,CHI,39,17,26.7,5.6,11.9,.474,0.9,2.9,.325,4.7,9.0,.523,.514,2.0,2.8,.731,1.1,5.1,6.2,2.2,0.6,0.4,2.2,2.2,14.3 -Jabari Parker,PF,23,WAS,25,0,27.3,6.0,11.4,.523,1.0,3.2,.296,5.0,8.2,.613,.565,2.1,3.0,.684,1.5,5.7,7.2,2.7,0.9,0.6,2.7,2.3,15.0 -Tony Parker*,PG,36,CHO,56,0,17.9,3.8,8.3,.460,0.2,0.9,.255,3.6,7.4,.485,.474,1.6,2.2,.734,0.3,1.2,1.5,3.7,0.4,0.1,1.3,0.9,9.5 -Chandler Parsons,SF,30,MEM,25,3,19.8,2.7,7.3,.374,1.2,3.8,.309,1.6,3.5,.443,.453,0.9,1.0,.880,0.2,2.6,2.8,1.7,0.8,0.2,1.3,1.8,7.5 -Patrick Patterson,PF,29,OKC,63,5,13.7,1.3,3.5,.374,0.7,2.2,.336,0.6,1.3,.439,.479,0.3,0.5,.633,0.7,1.7,2.3,0.5,0.3,0.2,0.3,0.7,3.6 -Justin Patton,C,21,PHI,3,0,7.0,0.7,2.3,.286,0.0,0.7,.000,0.7,1.7,.400,.286,0.3,0.7,.500,0.7,1.3,2.0,1.0,0.7,0.0,0.0,1.7,1.7 -Chris 
Paul,PG,33,HOU,58,58,32.0,5.2,12.4,.419,2.2,6.1,.358,3.0,6.3,.479,.508,3.0,3.5,.862,0.6,3.9,4.6,8.2,2.0,0.3,2.6,2.5,15.6 -Cameron Payne,PG,24,TOT,40,13,17.8,2.4,5.6,.430,0.6,2.1,.298,1.8,3.5,.511,.487,0.8,1.0,.805,0.3,1.5,1.8,2.7,0.7,0.2,1.1,1.6,6.3 -Cameron Payne,PG,24,CHI,31,12,17.3,2.2,5.4,.411,0.5,1.9,.271,1.7,3.5,.486,.458,0.7,0.8,.880,0.3,1.4,1.7,2.7,0.6,0.2,1.1,1.6,5.7 -Cameron Payne,PG,24,CLE,9,1,19.6,3.0,6.1,.491,1.0,2.8,.360,2.0,3.3,.600,.573,1.2,1.8,.688,0.3,1.8,2.1,2.6,0.9,0.3,1.2,1.7,8.2 -Elfrid Payton,PG,24,NOP,42,42,29.8,4.3,9.8,.434,0.8,2.5,.314,3.5,7.3,.476,.475,1.3,1.8,.743,1.2,4.1,5.2,7.6,1.0,0.4,2.7,1.9,10.6 -Gary Payton II,PG,26,WAS,3,0,5.3,1.7,2.7,.625,0.3,0.7,.500,1.3,2.0,.667,.688,0.0,0.0,0,0.3,0.3,0.7,1.3,1.0,0.3,0.3,0.7,3.7 -Theo Pinson,SG,23,BRK,18,0,11.7,1.4,4.1,.342,0.7,2.6,.261,0.7,1.5,.481,.425,1.1,1.2,.864,0.2,1.8,2.0,1.2,0.3,0.0,1.0,0.8,4.5 -Mason Plumlee,C,28,DEN,82,17,21.1,3.2,5.4,.593,0.0,0.1,.200,3.2,5.3,.602,.595,1.4,2.4,.561,2.0,4.4,6.4,3.0,0.8,0.9,1.5,3.1,7.8 -Miles Plumlee,C,30,ATL,18,0,9.6,1.8,2.7,.667,0.0,0.0,0,1.8,2.7,.667,.667,0.9,1.7,.533,0.9,1.3,2.2,0.9,0.3,0.2,0.6,0.8,4.4 -Jakob Poeltl,C,23,SAS,77,24,16.5,2.4,3.8,.645,0.0,0.0,0,2.4,3.8,.645,.645,0.6,1.2,.533,2.3,3.0,5.3,1.2,0.4,0.9,0.6,1.6,5.5 -Quincy Pondexter,SF,30,SAS,53,0,5.5,0.5,1.1,.500,0.1,0.3,.333,0.4,0.8,.575,.552,0.6,0.8,.810,0.2,0.7,0.9,0.5,0.2,0.0,0.2,0.5,1.8 -Otto Porter Jr.,SF,25,TOT,56,43,30.1,5.3,11.5,.465,1.9,4.6,.406,3.5,6.9,.504,.546,1.4,1.7,.813,1.0,4.6,5.6,2.1,1.5,0.6,1.2,1.9,13.9 -Otto Porter Jr.,SF,25,WAS,41,28,29.0,4.9,10.8,.457,1.6,4.3,.369,3.3,6.5,.515,.531,1.2,1.6,.766,1.0,4.7,5.6,2.0,1.6,0.5,1.0,1.9,12.6 -Otto Porter Jr.,SF,25,CHI,15,15,32.8,6.5,13.4,.483,2.6,5.3,.488,3.9,8.1,.479,.580,1.9,2.1,.906,0.9,4.6,5.5,2.7,1.2,0.6,1.7,1.9,17.5 -Bobby Portis,PF,23,TOT,50,28,26.0,5.6,12.6,.444,1.5,3.8,.393,4.1,8.7,.467,.504,1.5,1.9,.794,2.2,5.9,8.1,1.4,0.7,0.4,1.5,2.9,14.2 -Bobby 
Portis,PF,23,CHI,22,6,24.1,5.5,12.3,.450,1.2,3.3,.375,4.3,9.0,.477,.500,1.8,2.3,.780,2.1,5.2,7.3,1.3,0.5,0.4,1.3,2.8,14.1 -Bobby Portis,PF,23,WAS,28,22,27.4,5.6,12.8,.440,1.7,4.3,.403,3.9,8.5,.458,.507,1.4,1.7,.809,2.3,6.4,8.6,1.5,0.9,0.4,1.6,3.0,14.3 -Dwight Powell,C,27,DAL,77,22,21.6,3.8,6.3,.597,0.5,1.6,.307,3.3,4.7,.699,.637,2.5,3.3,.772,1.8,3.5,5.3,1.5,0.6,0.6,0.9,2.6,10.6 -Norman Powell,SG,25,TOR,60,3,18.8,3.2,6.7,.483,1.1,2.8,.400,2.1,3.8,.543,.568,1.0,1.3,.827,0.3,2.1,2.3,1.5,0.7,0.2,1.1,1.6,8.6 -Alex Poythress,PF,25,ATL,21,1,14.5,1.9,3.9,.494,0.4,1.1,.391,1.5,2.8,.534,.549,0.9,1.4,.621,1.4,2.2,3.6,0.8,0.2,0.5,0.6,2.2,5.1 -Taurean Prince,SF,24,ATL,55,47,28.2,4.8,10.8,.441,2.2,5.7,.390,2.5,5.1,.498,.545,1.7,2.1,.819,0.4,3.2,3.6,2.1,1.0,0.3,1.8,2.6,13.5 -Zhou Qi,PF,23,HOU,1,0,1.0,1.0,1.0,1.000,0.0,0.0,0,1.0,1.0,1.000,1.000,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0 -Ivan Rabb,PF,21,MEM,49,13,14.7,2.4,4.3,.547,0.1,0.3,.200,2.3,4.0,.574,.554,1.0,1.4,.710,1.4,2.8,4.2,1.1,0.3,0.3,0.7,1.8,5.8 -Chasson Randle,PG,25,WAS,49,2,15.2,1.9,4.4,.419,0.9,2.3,.400,0.9,2.1,.441,.525,0.9,1.3,.694,0.2,0.9,1.1,2.0,0.5,0.1,0.9,1.9,5.5 -Julius Randle,PF,24,NOP,73,49,30.6,7.8,14.9,.524,0.9,2.7,.344,6.9,12.2,.564,.555,4.9,6.7,.731,2.2,6.5,8.7,3.1,0.7,0.6,2.8,3.4,21.4 -JJ Redick,SG,34,PHI,76,63,31.3,5.9,13.5,.440,3.2,8.0,.397,2.8,5.6,.502,.557,3.0,3.4,.894,0.3,2.2,2.4,2.7,0.4,0.2,1.3,1.7,18.1 -Davon Reed,SG,23,IND,10,0,4.7,0.5,1.2,.417,0.2,0.4,.500,0.3,0.8,.375,.500,0.0,0.0,0,0.2,0.4,0.6,0.3,0.1,0.0,0.1,0.3,1.2 -Cameron Reynolds,SG,23,MIN,19,0,13.6,1.7,4.1,.423,1.1,2.7,.412,0.6,1.4,.444,.558,0.4,0.5,.889,0.2,1.4,1.6,0.7,0.3,0.1,0.3,1.4,5.0 -Josh Richardson,SG,25,MIA,73,73,34.8,5.8,14.1,.412,2.2,6.3,.357,3.5,7.8,.458,.492,2.7,3.2,.861,0.7,2.9,3.6,4.1,1.1,0.5,1.5,2.7,16.6 -Malachi Richardson,SG,23,TOR,22,0,4.7,0.4,1.3,.310,0.4,1.1,.320,0.0,0.2,.250,.448,0.2,0.2,.800,0.1,0.5,0.6,0.0,0.0,0.0,0.4,0.6,1.4 -Austin 
Rivers,SG,26,TOT,76,15,26.7,3.1,7.5,.406,1.4,4.3,.318,1.7,3.2,.522,.497,0.7,1.3,.526,0.3,1.8,2.1,2.2,0.6,0.3,0.9,2.7,8.1 -Austin Rivers,SG,26,WAS,29,2,23.6,2.6,6.7,.392,1.1,3.7,.311,1.5,3.0,.489,.477,0.9,1.6,.543,0.3,2.1,2.4,2.0,0.6,0.3,1.2,2.5,7.2 -Austin Rivers,SG,26,HOU,47,13,28.6,3.3,8.0,.413,1.5,4.7,.321,1.8,3.3,.541,.507,0.5,1.0,.510,0.3,1.6,1.9,2.3,0.6,0.3,0.7,2.9,8.7 -Devin Robinson,SF,23,WAS,7,0,13.6,2.7,4.6,.594,0.0,0.3,.000,2.7,4.3,.633,.594,1.3,2.0,.643,0.9,2.0,2.9,0.9,0.6,0.9,0.3,1.6,6.7 -Duncan Robinson,SF,24,MIA,15,1,10.7,1.2,3.1,.391,0.7,2.3,.286,0.5,0.7,.727,.500,0.3,0.4,.667,0.1,1.1,1.3,0.3,0.3,0.0,0.3,0.7,3.3 -Glenn Robinson III,SF,25,DET,47,18,13.0,1.6,3.7,.420,0.4,1.3,.290,1.2,2.4,.491,.472,0.7,0.9,.800,0.4,1.1,1.5,0.4,0.3,0.2,0.4,1.0,4.2 -Jerome Robinson,SG,21,LAC,33,0,9.7,1.3,3.3,.400,0.5,1.7,.316,0.8,1.6,.491,.482,0.2,0.3,.667,0.1,1.2,1.2,0.6,0.3,0.1,0.4,1.4,3.4 -Mitchell Robinson,C,20,NYK,66,19,20.6,3.1,4.4,.694,0.0,0.0,0,3.1,4.4,.694,.694,1.2,2.0,.600,2.7,3.7,6.4,0.6,0.8,2.4,0.5,3.3,7.3 -Rajon Rondo,PG,32,LAL,46,29,29.8,3.8,9.4,.405,1.1,3.1,.359,2.7,6.3,.428,.464,0.5,0.8,.639,0.7,4.5,5.3,8.0,1.2,0.2,2.8,2.2,9.2 -Derrick Rose,PG,30,MIN,51,13,27.3,7.1,14.8,.482,1.1,2.9,.370,6.1,11.9,.509,.518,2.7,3.1,.856,0.6,2.1,2.7,4.3,0.6,0.2,1.6,1.1,18.0 -Terrence Ross,SG,27,ORL,81,0,26.5,5.4,12.7,.428,2.7,7.0,.383,2.8,5.7,.484,.534,1.6,1.8,.875,0.3,3.1,3.5,1.7,0.9,0.4,1.1,1.5,15.1 -Terry Rozier,PG,24,BOS,79,14,22.7,3.3,8.4,.387,1.5,4.3,.353,1.8,4.2,.422,.477,0.9,1.2,.785,0.4,3.5,3.9,2.9,0.9,0.3,0.9,1.3,9.0 -Ricky Rubio,PG,28,UTA,68,67,27.9,4.3,10.7,.404,1.2,3.7,.311,3.2,7.0,.454,.458,2.9,3.4,.855,0.5,3.1,3.6,6.1,1.3,0.1,2.6,2.6,12.7 -D'Angelo Russell,PG,22,BRK,81,81,30.2,8.1,18.7,.434,2.9,7.8,.369,5.2,10.9,.482,.512,2.0,2.5,.780,0.7,3.2,3.9,7.0,1.2,0.2,3.1,1.7,21.1 -Domantas Sabonis,C,22,IND,74,5,24.8,5.6,9.5,.590,0.1,0.2,.529,5.5,9.2,.592,.596,2.8,3.9,.715,2.5,6.8,9.3,2.9,0.6,0.4,2.2,3.2,14.1 -Brandon 
Sampson,SG,21,CHI,14,2,15.3,2.0,4.3,.467,0.8,2.1,.379,1.2,2.2,.548,.558,0.3,0.4,.667,0.2,0.9,1.1,0.7,0.6,0.2,0.9,1.4,5.1 -JaKarr Sampson,SF,25,CHI,4,0,31.8,7.3,13.5,.537,1.3,3.5,.357,6.0,10.0,.600,.583,4.3,5.3,.810,1.3,6.8,8.0,1.0,1.0,0.8,1.0,2.0,20.0 -Dario Šarić,PF,24,TOT,81,41,25.0,3.8,8.6,.437,1.5,4.0,.365,2.3,4.6,.499,.522,1.6,1.9,.880,1.6,4.1,5.6,1.6,0.6,0.1,1.2,2.2,10.6 -Dario Šarić,PF,24,PHI,13,13,30.5,3.7,10.2,.364,1.6,5.4,.300,2.1,4.8,.435,.443,2.1,2.3,.900,1.6,5.0,6.6,2.0,0.3,0.2,1.9,3.0,11.1 -Dario Šarić,PF,24,MIN,68,28,23.9,3.8,8.3,.454,1.4,3.7,.383,2.3,4.6,.511,.540,1.5,1.8,.875,1.5,3.9,5.5,1.5,0.6,0.1,1.1,2.1,10.5 -Tomáš Satoranský,PG,27,WAS,80,54,27.1,3.2,6.6,.485,0.8,2.0,.395,2.4,4.6,.524,.545,1.6,2.0,.819,1.0,2.5,3.5,5.0,1.0,0.2,1.5,2.2,8.9 -Dennis Schröder,PG,25,OKC,79,14,29.3,5.8,14.0,.414,1.6,4.6,.341,4.2,9.4,.450,.470,2.4,2.9,.819,0.5,3.1,3.6,4.1,0.8,0.2,2.2,2.4,15.5 -Mike Scott,PF,30,TOT,79,3,17.7,2.1,5.3,.400,1.3,3.2,.401,0.8,2.1,.399,.520,0.3,0.4,.667,0.5,2.9,3.5,0.8,0.3,0.2,0.6,2.0,5.8 -Mike Scott,PF,30,LAC,52,0,14.4,1.8,4.4,.400,1.0,2.6,.391,0.8,1.9,.412,.513,0.2,0.3,.667,0.5,2.9,3.3,0.8,0.3,0.2,0.5,1.7,4.8 -Mike Scott,PF,30,PHI,27,3,24.0,2.8,7.0,.400,1.8,4.4,.412,1.0,2.6,.380,.529,0.4,0.6,.667,0.7,3.1,3.8,0.8,0.3,0.2,0.6,2.5,7.8 -Thabo Sefolosha,SF,34,UTA,50,2,12.2,1.4,3.0,.477,0.7,1.6,.436,0.7,1.4,.521,.591,0.3,0.4,.636,0.2,2.3,2.5,0.5,0.9,0.1,0.5,0.8,3.8 -Wayne Selden,SG,24,TOT,75,13,19.2,2.6,6.4,.406,0.7,2.3,.316,1.9,4.1,.456,.463,0.9,1.2,.728,0.5,1.9,2.4,1.5,0.4,0.2,1.1,1.7,6.9 -Wayne Selden,SG,24,MEM,32,0,14.2,2.0,5.0,.404,0.6,2.0,.317,1.4,3.1,.459,.466,0.7,0.9,.759,0.5,0.9,1.4,1.1,0.3,0.2,0.7,1.5,5.4 -Wayne Selden,SG,24,CHI,43,13,22.9,3.0,7.5,.407,0.8,2.6,.315,2.2,4.9,.455,.461,1.0,1.5,.714,0.5,2.7,3.2,1.7,0.5,0.2,1.3,1.9,8.0 -Collin Sexton,PG,20,CLE,82,72,31.8,6.3,14.7,.430,1.5,3.6,.402,4.9,11.1,.440,.480,2.6,3.1,.839,0.7,2.2,2.9,3.0,0.5,0.1,2.3,2.3,16.7 -Landry 
Shamet,SG,21,TOT,79,27,22.8,3.0,7.1,.431,2.1,5.0,.422,0.9,2.0,.453,.581,0.9,1.2,.806,0.3,1.4,1.7,1.5,0.5,0.1,0.6,2.0,9.1 -Landry Shamet,SG,21,PHI,54,4,20.5,2.8,6.4,.441,1.8,4.5,.404,1.0,1.9,.529,.584,0.8,1.0,.815,0.3,1.2,1.4,1.1,0.4,0.1,0.5,2.0,8.3 -Landry Shamet,SG,21,LAC,25,23,27.8,3.5,8.4,.414,2.7,6.0,.450,0.8,2.4,.322,.576,1.2,1.6,.795,0.3,2.0,2.2,2.3,0.5,0.1,0.8,2.0,10.9 -Iman Shumpert,SG,28,TOT,62,41,23.9,2.7,7.2,.374,1.5,4.4,.348,1.2,2.8,.416,.481,0.6,0.7,.800,0.4,2.5,3.0,1.8,1.0,0.4,0.8,2.0,7.5 -Iman Shumpert,SG,28,SAC,42,40,26.2,3.2,8.3,.382,1.8,4.8,.366,1.4,3.5,.404,.489,0.8,1.0,.829,0.5,2.6,3.1,2.2,1.1,0.5,0.9,2.2,8.9 -Iman Shumpert,SG,28,HOU,20,1,19.1,1.7,4.9,.347,1.1,3.6,.296,0.7,1.4,.481,.454,0.1,0.2,.500,0.4,2.3,2.7,1.1,0.6,0.2,0.6,1.8,4.6 -Pascal Siakam,PF,24,TOR,80,79,31.9,6.5,11.8,.549,1.0,2.7,.369,5.5,9.1,.602,.591,3.0,3.8,.785,1.6,5.3,6.9,3.1,0.9,0.7,1.9,3.0,16.9 -Jordan Sibert,SG,26,ATL,1,0,4.0,1.0,1.0,1.000,1.0,1.0,1.000,0.0,0.0,0,1.500,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0 -Ben Simmons,PG,22,PHI,79,79,34.2,6.8,12.2,.563,0.0,0.1,.000,6.8,12.1,.566,.563,3.3,5.4,.600,2.2,6.6,8.8,7.7,1.4,0.8,3.5,2.6,16.9 -Jonathon Simmons,SG,29,ORL,41,9,20.6,2.5,7.0,.364,0.5,2.0,.229,2.1,5.0,.419,.397,1.4,1.8,.778,0.6,1.9,2.4,2.3,0.4,0.3,1.4,1.7,6.9 -Jonathon Simmons,SF,29,PHI,15,0,14.6,1.9,4.3,.453,0.6,1.4,.429,1.3,2.9,.465,.523,1.1,1.7,.640,0.3,1.5,1.7,2.2,0.7,0.1,0.8,1.4,5.5 -Kobi Simmons,PG,21,CLE,1,0,2.0,0.0,0.0,0,0.0,0.0,0,0.0,0.0,0,0,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -Anfernee Simons,SG,19,POR,20,1,7.1,1.4,3.2,.444,0.5,1.5,.345,0.9,1.7,.529,.524,0.5,0.8,.563,0.2,0.5,0.7,0.7,0.1,0.0,0.6,0.5,3.8 -Marcus Smart,SG,24,BOS,80,60,27.5,3.0,7.1,.422,1.6,4.3,.364,1.4,2.8,.511,.533,1.3,1.6,.806,0.7,2.2,2.9,4.0,1.8,0.4,1.5,2.5,8.9 -Dennis Smith Jr.,PG,21,TOT,53,50,28.5,5.2,12.3,.428,1.3,3.9,.322,4.0,8.3,.477,.479,1.9,2.9,.635,0.6,2.3,2.9,4.8,1.3,0.4,2.9,2.4,13.6 -Dennis Smith 
Jr.,PG,21,DAL,32,32,28.4,4.9,11.2,.440,1.3,3.9,.344,3.6,7.3,.491,.500,1.8,2.6,.695,0.6,2.5,3.0,4.3,1.3,0.3,3.1,2.6,12.9 -Dennis Smith Jr.,PG,21,NYK,21,18,28.6,5.8,14.0,.413,1.1,4.0,.289,4.6,10.0,.462,.454,2.0,3.5,.568,0.7,2.1,2.8,5.4,1.3,0.4,2.6,2.2,14.7 -Ish Smith,PG,30,DET,56,0,22.3,3.7,8.7,.419,0.8,2.4,.326,2.9,6.3,.455,.464,0.8,1.1,.758,0.4,2.2,2.6,3.6,0.5,0.2,1.1,1.9,8.9 -J.R. Smith,SG,33,CLE,11,4,20.2,2.5,7.2,.342,1.1,3.5,.308,1.4,3.6,.375,.418,0.7,0.9,.800,0.0,1.6,1.6,1.9,1.0,0.3,1.0,1.7,6.7 -Jason Smith,C,32,WAS,12,1,10.8,1.3,3.1,.405,0.3,0.8,.400,0.9,2.3,.407,.459,0.8,1.0,.833,0.9,2.2,3.1,1.0,0.1,0.4,0.6,1.8,3.7 -Jason Smith,PF,32,MIL,6,0,6.7,0.7,2.2,.308,0.5,1.5,.333,0.2,0.7,.250,.423,0.3,0.3,1.000,0.5,1.3,1.8,0.2,0.3,0.3,0.8,1.0,2.2 -Jason Smith,C,32,NOP,2,0,10.0,1.0,4.5,.222,1.0,3.5,.286,0.0,1.0,.000,.333,1.0,1.0,1.000,1.0,1.0,2.0,0.5,0.0,0.0,0.5,1.5,4.0 -Zhaire Smith,SG,19,PHI,6,2,18.5,2.3,5.7,.412,1.0,2.7,.375,1.3,3.0,.444,.500,1.0,1.3,.750,0.5,1.7,2.2,1.7,0.3,0.3,1.0,1.3,6.7 -Tony Snell,SF,27,MIL,74,12,17.6,2.2,4.9,.452,1.1,2.8,.397,1.1,2.1,.522,.564,0.5,0.6,.881,0.4,1.7,2.1,0.9,0.4,0.2,0.3,1.2,6.0 -Ray Spalding,PF,21,TOT,14,3,10.6,1.8,3.4,.532,0.0,0.2,.000,1.8,3.1,.568,.532,0.3,0.9,.333,1.1,2.4,3.4,0.4,0.6,0.6,0.6,1.6,3.9 -Ray Spalding,PF,21,DAL,1,0,1.0,0.0,0.0,0,0.0,0.0,0,0.0,0.0,0,0,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -Ray Spalding,PF,21,PHO,13,3,11.3,1.9,3.6,.532,0.0,0.2,.000,1.9,3.4,.568,.532,0.3,0.9,.333,1.2,2.5,3.7,0.4,0.7,0.6,0.7,1.8,4.2 -Omari Spellman,PF,21,ATL,46,11,17.5,2.1,5.3,.402,1.0,2.8,.344,1.2,2.5,.466,.492,0.7,1.0,.711,1.6,2.7,4.2,1.0,0.6,0.5,0.7,1.5,5.9 -Nik Stauskas,SG,25,TOT,68,0,14.9,2.0,5.0,.402,1.0,2.8,.372,1.0,2.2,.440,.506,0.8,0.9,.891,0.3,1.6,1.9,1.2,0.3,0.1,0.8,0.7,5.9 -Nik Stauskas,SG,25,POR,44,0,15.3,2.2,5.2,.419,1.0,2.8,.344,1.2,2.4,.510,.513,0.7,0.8,.889,0.2,1.6,1.8,1.4,0.3,0.1,0.9,0.7,6.1 -Nik 
Stauskas,SG,25,CLE,24,0,14.3,1.7,4.5,.367,1.1,2.6,.429,0.5,1.9,.283,.491,1.0,1.2,.893,0.3,1.6,2.0,0.8,0.3,0.1,0.5,0.8,5.5 -D.J. Stephens,SG,28,MEM,1,0,7.0,1.0,2.0,.500,0.0,0.0,0,1.0,2.0,.500,.500,0.0,0.0,0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,2.0 -Lance Stephenson,SG,28,LAL,68,3,16.5,2.7,6.4,.426,1.1,2.9,.371,1.6,3.5,.472,.510,0.7,1.1,.685,0.5,2.7,3.2,2.1,0.6,0.1,1.3,1.6,7.2 -Edmond Sumner,PG,23,IND,23,2,9.1,1.0,2.8,.344,0.3,1.2,.259,0.7,1.6,.405,.398,0.7,1.0,.625,0.4,0.7,1.0,0.4,0.5,0.2,0.4,1.1,2.9 -Caleb Swanigan,PF,21,TOT,21,0,8.5,0.9,2.5,.340,0.0,0.3,.143,0.8,2.2,.370,.349,0.3,0.4,.667,0.8,2.2,3.0,0.5,0.3,0.0,0.9,1.3,2.0 -Caleb Swanigan,PF,21,POR,18,0,8.1,0.8,2.4,.318,0.1,0.3,.200,0.7,2.2,.333,.330,0.3,0.4,.857,0.7,2.2,2.9,0.4,0.2,0.0,0.9,1.2,1.9 -Caleb Swanigan,PF,21,SAC,3,0,11.0,1.3,3.0,.444,0.0,0.7,.000,1.3,2.3,.571,.444,0.0,0.7,.000,1.3,2.7,4.0,1.3,0.7,0.3,0.7,2.0,2.7 -Jayson Tatum,SF,20,BOS,79,79,31.1,5.9,13.1,.450,1.5,3.9,.373,4.4,9.2,.483,.506,2.5,2.9,.855,0.9,5.2,6.0,2.1,1.1,0.7,1.5,2.1,15.7 -Jeff Teague,PG,30,MIN,42,41,30.1,4.2,9.9,.423,0.8,2.5,.333,3.4,7.4,.453,.465,2.9,3.6,.804,0.4,2.1,2.5,8.2,1.0,0.4,2.3,2.1,12.1 -Garrett Temple,SG,32,TOT,75,55,27.2,2.8,6.6,.422,1.2,3.5,.341,1.6,3.1,.515,.513,1.1,1.4,.748,0.4,2.5,2.9,1.4,1.0,0.4,0.9,2.7,7.8 -Garrett Temple,SG,32,MEM,49,49,31.2,3.4,7.9,.429,1.5,4.3,.352,1.9,3.6,.520,.525,1.2,1.6,.750,0.4,2.7,3.1,1.4,1.0,0.5,1.1,2.7,9.4 -Garrett Temple,SG,32,LAC,26,6,19.6,1.6,4.1,.396,0.6,2.1,.296,1.0,2.0,.500,.472,0.9,1.2,.742,0.4,2.1,2.5,1.4,1.0,0.2,0.6,2.7,4.7 -Miloš Teodosić,PG,31,LAC,15,0,10.0,1.1,2.7,.425,0.7,1.8,.370,0.5,0.9,.538,.550,0.3,0.5,.571,0.2,0.9,1.1,2.1,0.2,0.1,1.4,1.8,3.2 -Jared Terrell,SG,23,MIN,14,0,7.9,0.9,2.8,.308,0.3,1.2,.235,0.6,1.6,.364,.359,0.2,0.4,.500,0.1,0.4,0.4,0.9,0.2,0.1,0.7,1.1,2.2 -Emanuel Terry,PF,22,TOT,3,0,7.7,1.3,2.3,.571,0.0,0.0,0,1.3,2.3,.571,.571,0.7,1.3,.500,1.0,1.3,2.3,0.7,1.0,0.0,0.7,1.3,3.3 -Emanuel 
Terry,PF,22,PHO,2,0,10.0,2.0,3.0,.667,0.0,0.0,0,2.0,3.0,.667,.667,0.5,1.0,.500,1.0,2.0,3.0,0.5,1.5,0.0,1.0,2.0,4.5 -Emanuel Terry,PF,22,MIA,1,0,3.0,0.0,1.0,.000,0.0,0.0,0,0.0,1.0,.000,.000,1.0,2.0,.500,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0 -Daniel Theis,C,26,BOS,66,2,13.8,2.2,4.0,.549,0.4,1.0,.388,1.8,3.0,.603,.598,0.8,1.2,.737,1.3,2.1,3.4,1.0,0.3,0.6,0.5,2.4,5.7 -Isaiah Thomas,SG,29,DEN,12,0,15.1,2.8,8.3,.343,1.0,3.6,.279,1.8,4.7,.393,.404,1.4,2.3,.630,0.4,0.7,1.1,1.9,0.4,0.1,1.5,1.4,8.1 -Khyri Thomas,SG,22,DET,26,0,7.5,0.8,2.7,.319,0.4,1.3,.286,0.5,1.3,.353,.391,0.3,0.4,.636,0.2,0.6,0.8,0.3,0.3,0.2,0.2,0.8,2.3 -Lance Thomas,PF,30,NYK,46,17,17.0,1.7,4.3,.396,0.5,1.7,.278,1.2,2.6,.475,.452,0.6,0.8,.750,0.5,2.1,2.5,0.6,0.4,0.2,0.5,1.8,4.5 -Klay Thompson,SG,28,GSW,78,78,34.0,8.4,18.0,.467,3.1,7.7,.402,5.3,10.3,.516,.553,1.7,2.0,.816,0.5,3.4,3.8,2.4,1.1,0.6,1.5,2.0,21.5 -Tristan Thompson,C,27,CLE,43,40,27.9,4.7,8.8,.529,0.0,0.0,0,4.7,8.8,.529,.529,1.6,2.5,.642,4.0,6.2,10.2,2.0,0.7,0.4,1.4,2.1,10.9 -Sindarius Thornwell,SG,24,LAC,64,1,4.9,0.3,0.8,.347,0.0,0.2,.200,0.2,0.5,.412,.378,0.4,0.5,.735,0.1,0.6,0.7,0.3,0.2,0.1,0.3,0.6,1.0 -Anthony Tolliver,PF,33,MIN,65,0,16.6,1.5,4.0,.382,1.2,3.3,.377,0.3,0.7,.409,.539,0.7,0.9,.783,0.2,2.5,2.7,0.7,0.3,0.3,0.6,1.4,5.0 -Karl-Anthony Towns,C,23,MIN,77,77,33.1,8.8,17.1,.518,1.8,4.6,.400,7.0,12.5,.562,.572,4.9,5.8,.836,3.4,9.0,12.4,3.4,0.9,1.6,3.1,3.8,24.4 -Gary Trent Jr.,SG,20,POR,15,1,7.4,1.1,3.3,.320,0.3,1.4,.238,0.7,1.9,.379,.370,0.2,0.5,.429,0.1,0.7,0.7,0.3,0.1,0.1,0.3,0.3,2.7 -Allonzo Trier,SG,23,NYK,64,3,22.8,3.6,8.1,.448,0.8,2.1,.394,2.8,6.0,.466,.498,2.8,3.5,.803,0.5,2.6,3.1,1.9,0.4,0.2,1.8,1.8,10.9 -P.J. 
Tucker,PF,33,HOU,82,82,34.2,2.5,6.4,.396,1.8,4.7,.377,0.7,1.7,.449,.535,0.5,0.7,.695,1.5,4.4,5.8,1.2,1.6,0.5,0.8,3.1,7.3 -Evan Turner,PG,30,POR,73,2,22.0,2.8,6.1,.460,0.2,0.7,.212,2.6,5.4,.494,.473,1.0,1.5,.708,0.5,4.0,4.5,3.9,0.5,0.2,1.6,1.5,6.8 -Myles Turner,C,22,IND,74,74,28.6,5.1,10.5,.487,1.0,2.6,.388,4.1,7.9,.521,.536,2.0,2.7,.736,1.4,5.8,7.2,1.6,0.8,2.7,1.4,2.6,13.3 -Ekpe Udoh,C,31,UTA,51,1,6.3,1.0,1.4,.694,0.0,0.0,0,1.0,1.4,.694,.694,0.4,0.6,.633,0.5,1.2,1.8,0.5,0.2,0.6,0.3,0.7,2.3 -Tyler Ulis,PG,23,CHI,1,0,1.0,0.0,0.0,0,0.0,0.0,0,0.0,0.0,0,0,0.0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -Jonas Valančiūnas,C,26,TOT,49,27,22.3,6.1,11.0,.559,0.3,1.0,.292,5.9,10.0,.586,.572,3.0,3.8,.795,2.2,6.4,8.6,1.4,0.4,1.1,1.8,3.0,15.6 -Jonas Valančiūnas,C,26,TOR,30,10,18.8,5.0,8.6,.575,0.3,1.0,.300,4.7,7.6,.611,.593,2.6,3.1,.819,1.8,5.4,7.2,1.0,0.4,0.8,1.3,2.7,12.8 -Jonas Valančiūnas,C,26,MEM,19,17,27.7,8.0,14.7,.545,0.3,0.9,.278,7.7,13.7,.563,.554,3.7,4.8,.769,2.7,7.9,10.7,2.2,0.3,1.6,2.7,3.4,19.9 -Jarred Vanderbilt,PF,19,DEN,17,0,4.1,0.5,1.1,.474,0.0,0.1,.000,0.5,1.1,.500,.474,0.4,0.6,.600,0.4,0.9,1.4,0.2,0.4,0.1,0.5,0.5,1.4 -Fred VanVleet,PG,24,TOR,64,28,27.5,3.8,9.4,.410,1.8,4.6,.378,2.1,4.8,.441,.503,1.5,1.8,.843,0.3,2.3,2.6,4.8,0.9,0.3,1.3,1.7,11.0 -Noah Vonleh,PF,23,NYK,68,57,25.3,3.0,6.5,.470,0.7,2.0,.336,2.4,4.5,.531,.523,1.6,2.3,.712,1.7,6.1,7.8,1.9,0.7,0.8,1.3,2.6,8.4 -Nikola Vučević,C,28,ORL,80,80,31.4,8.8,16.9,.518,1.1,2.9,.364,7.7,14.0,.549,.549,2.2,2.8,.789,2.8,9.2,12.0,3.8,1.0,1.1,2.0,2.0,20.8 -Dwyane Wade*,SG,37,MIA,72,2,26.2,5.8,13.3,.433,1.2,3.6,.330,4.6,9.7,.472,.478,2.3,3.2,.708,1.0,3.0,4.0,4.2,0.8,0.5,2.3,1.6,15.0 -Moritz Wagner,C,21,LAL,43,5,10.4,1.7,4.0,.415,0.5,1.8,.286,1.1,2.2,.521,.480,1.0,1.2,.811,0.4,1.6,2.0,0.6,0.3,0.3,0.9,1.3,4.8 -Dion Waiters,SG,27,MIA,44,28,25.9,4.5,10.9,.414,2.5,6.6,.377,2.0,4.3,.471,.528,0.5,1.0,.500,0.2,2.5,2.6,2.8,0.7,0.2,1.5,1.6,12.0 -Kemba 
Walker,PG,28,CHO,82,82,34.9,8.9,20.5,.434,3.2,8.9,.356,5.7,11.6,.494,.511,4.6,5.5,.844,0.6,3.8,4.4,5.9,1.2,0.4,2.6,1.6,25.6 -Lonnie Walker IV,SG,20,SAS,17,0,6.9,0.9,2.7,.348,0.3,0.8,.385,0.6,1.9,.333,.402,0.5,0.6,.800,0.0,1.0,1.0,0.5,0.4,0.2,0.2,0.4,2.6 -John Wall,PG,28,WAS,32,32,34.5,7.7,17.3,.444,1.6,5.3,.302,6.1,12.0,.507,.490,3.8,5.5,.697,0.5,3.2,3.6,8.7,1.5,0.9,3.8,2.2,20.7 -Tyrone Wallace,PG,24,LAC,62,0,10.1,1.5,3.5,.424,0.1,0.3,.211,1.4,3.2,.444,.433,0.5,0.9,.526,0.3,1.3,1.6,0.7,0.3,0.1,0.6,1.3,3.5 -Brad Wanamaker,PG,29,BOS,36,0,9.5,1.4,2.9,.476,0.4,1.1,.410,0.9,1.8,.515,.552,0.7,0.8,.857,0.1,1.1,1.1,1.6,0.3,0.1,0.5,0.9,3.9 -T.J. Warren,SF,25,PHO,43,36,31.6,6.9,14.2,.486,1.8,4.2,.428,5.1,10.0,.510,.549,2.3,2.9,.815,0.7,3.3,4.0,1.5,1.2,0.7,1.2,2.8,18.0 -Julian Washburn,SF,27,MEM,18,3,14.1,0.9,2.7,.333,0.3,1.3,.208,0.6,1.3,.458,.385,0.2,0.2,.750,0.4,1.8,2.3,0.8,0.7,0.1,0.4,0.9,2.2 -Yuta Watanabe,SF,24,MEM,15,0,11.6,1.0,3.4,.294,0.1,1.1,.125,0.9,2.3,.371,.314,0.5,0.7,.700,0.3,1.8,2.1,0.5,0.3,0.1,0.4,0.7,2.6 -Thomas Welsh,C,22,DEN,11,0,3.3,0.6,1.2,.538,0.3,0.6,.429,0.4,0.5,.667,.654,0.1,0.2,.500,0.0,0.4,0.4,0.5,0.0,0.0,0.1,0.3,1.6 -Russell Westbrook,PG,30,OKC,73,73,36.0,8.6,20.2,.428,1.6,5.6,.290,7.0,14.5,.481,.468,4.1,6.2,.656,1.5,9.6,11.1,10.7,1.9,0.5,4.5,3.4,22.9 -Derrick White,PG,24,SAS,67,55,25.8,3.9,8.1,.479,0.7,2.1,.338,3.2,6.0,.529,.523,1.4,1.8,.772,0.5,3.2,3.7,3.9,1.0,0.7,1.4,2.2,9.9 -Okaro White,PF,26,WAS,3,0,2.0,0.0,0.7,.000,0.0,0.7,.000,0.0,0.0,0,.000,0.0,0.0,0,0.3,0.3,0.7,0.0,0.0,0.0,0.0,0.0,0.0 -Hassan Whiteside,C,29,MIA,72,53,23.3,5.4,9.4,.571,0.0,0.2,.125,5.4,9.2,.581,.572,1.5,3.4,.449,3.6,7.8,11.3,0.8,0.6,1.9,1.3,2.7,12.3 -Andrew Wiggins,SF,23,MIN,73,73,34.8,6.8,16.6,.412,1.6,4.8,.339,5.2,11.8,.441,.461,2.8,4.1,.699,1.1,3.7,4.8,2.5,1.0,0.7,1.9,2.1,18.1 -Alan Williams,PF,26,BRK,5,0,5.2,1.6,2.6,.615,0.0,0.2,.000,1.6,2.4,.667,.615,0.4,0.8,.500,0.8,3.0,3.8,0.6,0.2,0.0,0.2,0.4,3.6 -C.J. 
Williams,SG,28,MIN,15,0,8.5,1.1,2.3,.486,0.3,1.1,.313,0.8,1.3,.632,.557,0.0,0.1,.000,0.1,0.5,0.5,0.8,0.4,0.0,0.3,1.1,2.6 -Johnathan Williams,C,23,LAL,24,0,15.5,2.7,4.6,.591,0.0,0.1,.000,2.7,4.5,.602,.591,1.1,2.0,.563,2.0,2.1,4.1,0.5,0.3,0.3,0.7,2.6,6.5 -Kenrich Williams,SF,24,NOP,46,29,23.5,2.3,6.1,.384,1.1,3.4,.333,1.2,2.7,.447,.477,0.3,0.4,.684,1.2,3.6,4.8,1.8,1.0,0.4,0.8,2.1,6.1 -Lou Williams,SG,32,LAC,75,1,26.6,6.5,15.2,.425,1.4,3.9,.361,5.1,11.3,.447,.471,5.7,6.5,.876,0.5,2.4,3.0,5.4,0.8,0.1,2.4,1.1,20.0 -Marvin Williams,PF,32,CHO,75,75,28.4,3.7,8.7,.422,1.9,5.1,.366,1.8,3.6,.500,.529,0.9,1.1,.767,1.0,4.4,5.4,1.2,0.9,0.8,0.6,2.1,10.1 -Robert Williams,C,21,BOS,32,2,8.8,1.1,1.6,.706,0.0,0.0,0,1.1,1.6,.706,.706,0.3,0.5,.600,0.8,1.7,2.5,0.2,0.3,1.3,0.3,1.1,2.5 -Troy Williams,SF,24,SAC,21,0,14.9,2.1,4.7,.449,0.7,2.1,.318,1.4,2.6,.556,.520,0.4,0.7,.600,0.6,2.2,2.8,0.5,0.5,0.4,0.4,1.8,5.3 -D.J. Wilson,PF,22,MIL,48,3,18.4,2.2,5.2,.414,1.0,2.6,.362,1.2,2.6,.468,.506,0.5,1.0,.553,0.9,3.7,4.6,1.1,0.4,0.4,0.7,1.8,5.8 -Justise Winslow,SF,22,MIA,66,52,29.7,4.9,11.3,.433,1.5,3.9,.375,3.5,7.5,.462,.497,1.3,2.1,.628,1.0,4.4,5.4,4.3,1.1,0.3,2.2,2.7,12.6 -Christian Wood,PF,23,TOT,21,2,12.0,2.9,5.6,.521,0.4,1.2,.346,2.5,4.3,.571,.560,2.0,2.7,.732,0.8,3.1,4.0,0.4,0.3,0.5,0.8,0.8,8.2 -Christian Wood,PF,23,MIL,13,0,4.8,0.9,1.9,.480,0.2,0.4,.600,0.7,1.5,.450,.540,0.8,1.2,.667,0.3,1.2,1.5,0.2,0.0,0.0,0.2,0.2,2.8 -Christian Wood,PF,23,NOP,8,2,23.6,6.1,11.5,.533,0.8,2.6,.286,5.4,8.9,.606,.565,3.9,5.1,.756,1.6,6.3,7.9,0.8,0.9,1.3,1.8,1.8,16.9 -Delon Wright,PG,26,TOT,75,13,22.7,3.2,7.4,.434,0.7,2.2,.298,2.6,5.2,.492,.478,1.6,2.0,.793,0.9,2.6,3.5,3.3,1.2,0.4,1.0,1.4,8.7 -Delon Wright,PG,26,TOR,49,2,18.3,2.6,6.0,.433,0.6,1.8,.333,2.0,4.1,.478,.485,1.1,1.2,.869,0.8,1.8,2.6,2.2,0.9,0.3,0.8,1.1,6.9 -Delon Wright,PG,26,MEM,26,11,30.8,4.4,10.2,.434,0.8,3.0,.256,3.7,7.2,.508,.472,2.5,3.4,.742,1.1,4.3,5.4,5.3,1.6,0.6,1.5,1.9,12.2 -Guerschon 
Yabusele,PF,23,BOS,41,1,6.1,0.9,1.9,.455,0.2,0.7,.321,0.6,1.2,.531,.513,0.4,0.5,.682,0.6,0.7,1.3,0.4,0.2,0.2,0.4,0.8,2.3 -Nick Young,SG,33,DEN,4,0,9.3,0.8,2.3,.333,0.8,2.0,.375,0.0,0.3,.000,.500,0.0,0.0,0,0.0,0.3,0.3,0.5,0.0,0.3,0.5,1.0,2.3 -Thaddeus Young,PF,30,IND,81,81,30.7,5.5,10.4,.527,0.6,1.8,.349,4.8,8.6,.564,.557,1.1,1.7,.644,2.4,4.1,6.5,2.5,1.5,0.4,1.5,2.4,12.6 -Trae Young,PG,20,ATL,81,81,30.9,6.5,15.5,.418,1.9,6.0,.324,4.6,9.6,.477,.480,4.2,5.1,.829,0.8,2.9,3.7,8.1,0.9,0.2,3.8,1.7,19.1 -Cody Zeller,C,26,CHO,49,47,25.4,3.9,7.0,.551,0.1,0.4,.273,3.8,6.6,.570,.559,2.3,2.9,.787,2.2,4.6,6.8,2.1,0.8,0.8,1.3,3.3,10.1 -Tyler Zeller,C,29,TOT,6,1,15.5,2.7,5.0,.533,0.0,0.2,.000,2.7,4.8,.552,.533,2.3,3.0,.778,1.8,2.2,4.0,0.7,0.2,0.5,0.7,3.3,7.7 -Tyler Zeller,C,29,ATL,2,0,5.5,0.0,1.0,.000,0.0,0.5,.000,0.0,0.5,.000,.000,0.0,0.0,0,1.0,2.0,3.0,0.5,0.0,0.0,0.0,2.0,0.0 -Tyler Zeller,C,29,MEM,4,1,20.5,4.0,7.0,.571,0.0,0.0,0,4.0,7.0,.571,.571,3.5,4.5,.778,2.3,2.3,4.5,0.8,0.3,0.8,1.0,4.0,11.5 -Ante Žižić,C,22,CLE,59,25,18.3,3.1,5.6,.553,0.0,0.0,0,3.1,5.6,.553,.553,1.6,2.2,.705,1.8,3.6,5.4,0.9,0.2,0.4,1.0,1.9,7.8 -Ivica Zubac,C,21,TOT,59,37,17.6,3.6,6.4,.559,0.0,0.0,0,3.6,6.4,.559,.559,1.7,2.1,.802,1.9,4.2,6.1,1.1,0.2,0.9,1.2,2.3,8.9 -Ivica Zubac,C,21,LAL,33,12,15.6,3.4,5.8,.580,0.0,0.0,0,3.4,5.8,.580,.580,1.7,2.0,.864,1.6,3.3,4.9,0.8,0.1,0.8,1.0,2.2,8.5 -Ivica Zubac,C,21,LAC,26,25,20.2,3.8,7.2,.538,0.0,0.0,0,3.8,7.2,.538,.538,1.7,2.3,.733,2.3,5.3,7.7,1.5,0.4,0.9,1.4,2.5,9.4 diff --git a/app/src/pages/00_Pol_Strat_Home.py b/app/src/pages/00_Pol_Strat_Home.py deleted file mode 100644 index 3d02f25552..0000000000 --- a/app/src/pages/00_Pol_Strat_Home.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks - -st.set_page_config(layout = 'wide') - -# Show appropriate sidebar links for the role of the currently logged in user -SideBarLinks() - -st.title(f"Welcome Political Strategist, 
{st.session_state['first_name']}.") -st.write('') -st.write('') -st.write('### What would you like to do today?') - -if st.button('View World Bank Data Visualization', - type='primary', - use_container_width=True): - st.switch_page('pages/01_World_Bank_Viz.py') - -if st.button('View World Map Demo', - type='primary', - use_container_width=True): - st.switch_page('pages/02_Map_Demo.py') \ No newline at end of file diff --git a/app/src/pages/01_World_Bank_Viz.py b/app/src/pages/01_World_Bank_Viz.py deleted file mode 100644 index a34cbb1529..0000000000 --- a/app/src/pages/01_World_Bank_Viz.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging -logger = logging.getLogger(__name__) -import pandas as pd -import streamlit as st -from streamlit_extras.app_logo import add_logo -import world_bank_data as wb -import matplotlib.pyplot as plt -import numpy as np -import plotly.express as px -from modules.nav import SideBarLinks - -# Call the SideBarLinks from the nav module in the modules directory -SideBarLinks() - -# set the header of the page -st.header('World Bank Data') - -# You can access the session state to make a more customized/personalized app experience -st.write(f"### Hi, {st.session_state['first_name']}.") - -# get the countries from the world bank data -with st.echo(code_location='above'): - countries:pd.DataFrame = wb.get_countries() - - st.dataframe(countries) - -# the with statment shows the code for this block above it -with st.echo(code_location='above'): - arr = np.random.normal(1, 1, size=100) - test_plot, ax = plt.subplots() - ax.hist(arr, bins=20) - - st.pyplot(test_plot) - - -with st.echo(code_location='above'): - slim_countries = countries[countries['incomeLevel'] != 'Aggregates'] - data_crosstab = pd.crosstab(slim_countries['region'], - slim_countries['incomeLevel'], - margins = False) - st.table(data_crosstab) diff --git a/app/src/pages/02_Map_Demo.py b/app/src/pages/02_Map_Demo.py deleted file mode 100644 index 5ca09a9633..0000000000 --- 
a/app/src/pages/02_Map_Demo.py +++ /dev/null @@ -1,104 +0,0 @@ -import logging -logger = logging.getLogger(__name__) -import streamlit as st -from streamlit_extras.app_logo import add_logo -import pandas as pd -import pydeck as pdk -from urllib.error import URLError -from modules.nav import SideBarLinks - -SideBarLinks() - -# add the logo -add_logo("assets/logo.png", height=400) - -# set up the page -st.markdown("# Mapping Demo") -st.sidebar.header("Mapping Demo") -st.write( - """This Mapping Demo is from the Streamlit Documentation. It shows how to use -[`st.pydeck_chart`](https://docs.streamlit.io/library/api-reference/charts/st.pydeck_chart) -to display geospatial data.""" -) - - -@st.cache_data -def from_data_file(filename): - url = ( - "http://raw.githubusercontent.com/streamlit/" - "example-data/master/hello/v1/%s" % filename - ) - return pd.read_json(url) - - -try: - ALL_LAYERS = { - "Bike Rentals": pdk.Layer( - "HexagonLayer", - data=from_data_file("bike_rental_stats.json"), - get_position=["lon", "lat"], - radius=200, - elevation_scale=4, - elevation_range=[0, 1000], - extruded=True, - ), - "Bart Stop Exits": pdk.Layer( - "ScatterplotLayer", - data=from_data_file("bart_stop_stats.json"), - get_position=["lon", "lat"], - get_color=[200, 30, 0, 160], - get_radius="[exits]", - radius_scale=0.05, - ), - "Bart Stop Names": pdk.Layer( - "TextLayer", - data=from_data_file("bart_stop_stats.json"), - get_position=["lon", "lat"], - get_text="name", - get_color=[0, 0, 0, 200], - get_size=15, - get_alignment_baseline="'bottom'", - ), - "Outbound Flow": pdk.Layer( - "ArcLayer", - data=from_data_file("bart_path_stats.json"), - get_source_position=["lon", "lat"], - get_target_position=["lon2", "lat2"], - get_source_color=[200, 30, 0, 160], - get_target_color=[200, 30, 0, 160], - auto_highlight=True, - width_scale=0.0001, - get_width="outbound", - width_min_pixels=3, - width_max_pixels=30, - ), - } - st.sidebar.markdown("### Map Layers") - selected_layers = [ - layer - 
for layer_name, layer in ALL_LAYERS.items() - if st.sidebar.checkbox(layer_name, True) - ] - if selected_layers: - st.pydeck_chart( - pdk.Deck( - map_style="mapbox://styles/mapbox/light-v9", - initial_view_state={ - "latitude": 37.76, - "longitude": -122.4, - "zoom": 11, - "pitch": 50, - }, - layers=selected_layers, - ) - ) - else: - st.error("Please choose at least one layer above.") -except URLError as e: - st.error( - """ - **This demo requires internet access.** - Connection error: %s - """ - % e.reason - ) diff --git a/app/src/pages/03_Simple_Chat_Bot.py b/app/src/pages/03_Simple_Chat_Bot.py deleted file mode 100644 index fa8db58e84..0000000000 --- a/app/src/pages/03_Simple_Chat_Bot.py +++ /dev/null @@ -1,66 +0,0 @@ -import logging -logger = logging.getLogger(__name__) -import streamlit as st -from streamlit_extras.app_logo import add_logo -import numpy as np -import random -import time -from modules.nav import SideBarLinks - -SideBarLinks() - -def response_generator(): - response = random.choice ( - [ - "Hello there! How can I assist you today?", - "Hi, human! Is there anything I can help you with?", - "Do you need help?", - ] - ) - for word in response.split(): - yield word + " " - time.sleep(0.05) -#----------------------------------------------------------------------- - -st.set_page_config (page_title="Sample Chat Bot", page_icon="🤖") -add_logo("assets/logo.png", height=400) - -st.title("Echo Bot 🤖") - -st.markdown(""" - Currently, this chat bot only returns a random message from the following list: - - Hello there! How can I assist you today? - - Hi, human! Is there anything I can help you with? - - Do you need help? 
- """ - ) - - -# Initialize chat history -if "messages" not in st.session_state: - st.session_state.messages = [] - -# Display chat message from history on app rerun -for message in st.session_state.messages: - with st.chat_message(message["role"]): - st.markdown(message["content"]) - -# React to user input -if prompt := st.chat_input("What is up?"): - # Display user message in chat message container - with st.chat_message("user"): - st.markdown(prompt) - - # Add user message to chat history - st.session_state.messages.append({"role": "user", "content": prompt}) - - response = f"Echo: {prompt}" - - # Display assistant response in chat message container - with st.chat_message("assistant"): - # st.markdown(response) - response = st.write_stream(response_generator()) - - # Add assistant response to chat history - st.session_state.messages.append({"role": "assistant", "content": response}) - diff --git a/app/src/pages/04_Prediction.py b/app/src/pages/04_Prediction.py deleted file mode 100644 index a5a322a2f4..0000000000 --- a/app/src/pages/04_Prediction.py +++ /dev/null @@ -1,38 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks -import requests - -st.set_page_config(layout = 'wide') - -# Display the appropriate sidebar links for the role of the logged in user -SideBarLinks() - -st.title('Prediction with Regression') - -# create a 2 column layout -col1, col2 = st.columns(2) - -# add one number input for variable 1 into column 1 -with col1: - var_01 = st.number_input('Variable 01:', - step=1) - -# add another number input for variable 2 into column 2 -with col2: - var_02 = st.number_input('Variable 02:', - step=1) - -logger.info(f'var_01 = {var_01}') -logger.info(f'var_02 = {var_02}') - -# add a button to use the values entered into the number field to send to the -# prediction function via the REST API -if st.button('Calculate Prediction', - type='primary', - use_container_width=True): - results 
= requests.get(f'http://api:4000/c/prediction/{var_01}/{var_02}').json() - st.dataframe(results) - \ No newline at end of file diff --git a/app/src/pages/10_Superfan_Home.py b/app/src/pages/10_Superfan_Home.py new file mode 100644 index 0000000000..7f52aa7b3e --- /dev/null +++ b/app/src/pages/10_Superfan_Home.py @@ -0,0 +1,28 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks + +st.set_page_config(layout='wide') +SideBarLinks() + +st.title(f"Welcome Super Fan, {st.session_state.get('first_name', 'Guest')}.") +st.write('') +st.write('') +st.write('### What would you like to do today?') + +if st.button('Player Comparison', + type='primary', + use_container_width=True): + st.switch_page('pages/12_Player_Comparison.py') + +if st.button('Player Stats', + type='primary', + use_container_width=True): + st.switch_page('pages/11_Player_Finder.py') + +if st.button('Game Analysis', + type='primary', + use_container_width=True): + st.switch_page('pages/13_Historical_Game_Results.py') diff --git a/app/src/pages/10_USAID_Worker_Home.py b/app/src/pages/10_USAID_Worker_Home.py deleted file mode 100644 index d7b230384c..0000000000 --- a/app/src/pages/10_USAID_Worker_Home.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks - -st.set_page_config(layout = 'wide') - -# Show appropriate sidebar links for the role of the currently logged in user -SideBarLinks() - -st.title(f"Welcome USAID Worker, {st.session_state['first_name']}.") -st.write('') -st.write('') -st.write('### What would you like to do today?') - -if st.button('Predict Value Based on Regression Model', - type='primary', - use_container_width=True): - st.switch_page('pages/11_Prediction.py') - -if st.button('View the Simple API Demo', - type='primary', - use_container_width=True): - st.switch_page('pages/12_API_Test.py') - -if st.button("View Classification 
Demo", - type='primary', - use_container_width=True): - st.switch_page('pages/13_Classification.py') \ No newline at end of file diff --git a/app/src/pages/11_Player_Finder.py b/app/src/pages/11_Player_Finder.py new file mode 100644 index 0000000000..a2d4a2f2b2 --- /dev/null +++ b/app/src/pages/11_Player_Finder.py @@ -0,0 +1,208 @@ +# pages/Player_Finder.py +import os +import logging +import requests +import pandas as pd +import streamlit as st +import plotly.express as px +from modules.nav import SideBarLinks + +logger = logging.getLogger(__name__) +st.set_page_config(page_title="Player Finder", layout="wide") + +# Render your app's sidebar nav (logo, links, auth redirect, etc.) +SideBarLinks() + +st.title("🔎 Player Finder (Superfan)") + +# If your Streamlit app runs in Docker alongside the API, keep http://api:4000 +# If you run Streamlit on your host, set API_BASE_URL=http://localhost:4000 +BASE_URL = os.getenv("API_BASE_URL", "http://api:4000") + +# ---------- Utilities ---------- +def api_get(path: str, params: dict | None = None): + try: + url = f"{BASE_URL}{path}" + r = requests.get(url, params=params, timeout=20) + if r.status_code in (200, 201): + return r.json() + st.error(f"API {r.status_code}: {r.text}") + except Exception as e: + st.error(f"Connection error: {e}") + return None + +@st.cache_data(ttl=120) +def load_players(position=None, team_id=None, min_age=None, max_age=None, min_salary=None, max_salary=None): + params = {} + if position: params["position"] = position + if team_id: params["team_id"] = team_id + if min_age is not None: params["min_age"] = min_age + if max_age is not None: params["max_age"] = max_age + if min_salary is not None: params["min_salary"] = min_salary + if max_salary is not None: params["max_salary"] = max_salary + + # ✅ Prefixed path + data = api_get("/basketball/players", params) + + # Your /players handler currently returns a list (not wrapped in {"players": ...}) + if isinstance(data, dict) and "players" in data: + rows 
= data["players"] + else: + rows = data or [] + return pd.DataFrame(rows) + +@st.cache_data(ttl=120) +def fetch_player_season_stat(player_id: int, season: str | None): + params = {} + if season: + params["season"] = season + # NOTE: Your /players//stats returns a dict {stats: {...}, recent_games: [...]} + # ✅ Prefixed path + resp = api_get(f"/basketball/players/{player_id}/stats", params) + if not resp or "stats" not in resp or resp["stats"] is None: + return {} + return resp["stats"] + +def enrich_with_stats(df_players: pd.DataFrame, season: str | None, max_players: int = 50): + """For performance, cap the number of stat fetches.""" + if df_players.empty: + return df_players + + rows = [] + limit = min(len(df_players), max_players) + for _, row in df_players.head(limit).iterrows(): + stats = fetch_player_season_stat(int(row["player_id"]), season) + merged = {**row.to_dict(), **(stats or {})} + rows.append(merged) + # If there are more players than max_players, keep the remaining without stats. + if len(df_players) > limit: + remainder = df_players.iloc[limit:].copy() + rows += [r for _, r in remainder.iterrows()] + return pd.DataFrame(rows) + +# ---------- Filters (TOP, not sidebar) ---------- +st.header("Filters") + +col1, col2, col3, col4 = st.columns(4) +with col1: + position = st.selectbox( + "Position", + options=["", "PG", "SG", "SF", "PF", "C", "Guard", "Forward", "Center"], + index=0, + help="Filter by the player's listed position." 
+ ) +with col2: + team_id_input = st.text_input("Team ID (optional)", value="", help="Enter a numeric team_id.") + team_id = int(team_id_input) if team_id_input.strip().isdigit() else None +with col3: + min_age = st.number_input("Min Age", min_value=0, max_value=60, value=0, step=1) +with col4: + max_age = st.number_input("Max Age", min_value=0, max_value=60, value=60, step=1) + +col5, col6, col7, col8 = st.columns(4) +with col5: + min_salary = st.number_input("Min Salary", min_value=0, value=0, step=100_000) +with col6: + max_salary = st.number_input("Max Salary", min_value=0, value=0, step=100_000, help="0 means no max") +with col7: + season = st.text_input("Season (optional)", value="", help="e.g., 2024-25 or 2025. Leave blank for all seasons.") +with col8: + include_stats = st.checkbox("Include Season Averages (/basketball/players//stats)", value=True) + +col9, col10, col11 = st.columns([1, 1, 2]) +with col9: + max_stats = st.slider("Max Players for Stats", 10, 200, 50, 10) +with col10: + stat_to_sort = st.selectbox( + "Stat to Sort/Chart", + options=[ + "avg_points", "avg_rebounds", "avg_assists", + "avg_steals", "avg_blocks", "avg_turnovers", + "avg_plus_minus", "avg_minutes", "avg_shooting_pct", + ], + index=0 + ) +with col11: + run_btn = st.button("Search Players", type="primary") + +st.markdown("---") + +# ---------- Main area ---------- +if run_btn: + # Treat 0 as “unset” for max_salary to avoid filtering out everyone + max_salary_param = None if max_salary == 0 else max_salary + + df = load_players( + position=(position or None), + team_id=team_id, + min_age=min_age if min_age > 0 else None, + max_age=max_age if max_age < 60 else None, + min_salary=min_salary if min_salary > 0 else None, + max_salary=max_salary_param + ) + + if df.empty: + st.warning("No players found for these filters.") + st.stop() + + st.subheader("Players (Basic Info)") + st.dataframe(df, use_container_width=True, hide_index=True) + + if include_stats: + st.info("Fetching season 
averages… this may take a moment for many players.") + df_stats = enrich_with_stats(df, season if season.strip() else None, max_players=max_stats) + else: + df_stats = df.copy() + + # Ensure the selected stat column exists + if stat_to_sort not in df_stats.columns: + df_stats[stat_to_sort] = 0 + + # Friendly display name + df_stats["player_name"] = ( + df_stats.get("first_name", "").astype(str) + " " + df_stats.get("last_name", "").astype(str) + ).str.strip() + + # Sort by selected stat (desc) + df_sorted = df_stats.sort_values(by=stat_to_sort, ascending=False, na_position="last") + + st.subheader(f"Top Players by **{stat_to_sort}**") + top_n = st.slider("Show Top N", 5, 50, 15, 5) + + # Chart + chart_cols = ["player_name", stat_to_sort, "position", "current_salary", "expected_salary"] + for c in chart_cols: + if c not in df_sorted.columns: + df_sorted[c] = None + chart_df = df_sorted.head(top_n)[chart_cols].copy() + + fig = px.bar( + chart_df, + x=stat_to_sort, + y="player_name", + color="position" if "position" in chart_df.columns else None, + orientation="h", + title=f"Top {min(top_n, len(chart_df))} by {stat_to_sort}" + ) + st.plotly_chart(fig, use_container_width=True) + + # Detailed table + cols_to_show = [ + "player_id", "player_name", "position", "age", "years_exp", + "current_team", "current_salary", "expected_salary", + "avg_points", "avg_rebounds", "avg_assists", "avg_steals", "avg_blocks", + "avg_turnovers", "avg_plus_minus", "avg_minutes", "avg_shooting_pct" + ] + present_cols = [c for c in cols_to_show if c in df_sorted.columns] + st.subheader("Results Table") + st.dataframe(df_sorted[present_cols], use_container_width=True, hide_index=True) +else: + st.info("Set filters above and click **Search Players**.") + +# Optional: debug footer +with st.expander("Debug Info"): + st.write({ + "BASE_URL": BASE_URL, + "players_path": "/basketball/players", + "stats_path": "/basketball/players//stats" + }) diff --git a/app/src/pages/11_Prediction.py 
b/app/src/pages/11_Prediction.py deleted file mode 100644 index a5a322a2f4..0000000000 --- a/app/src/pages/11_Prediction.py +++ /dev/null @@ -1,38 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks -import requests - -st.set_page_config(layout = 'wide') - -# Display the appropriate sidebar links for the role of the logged in user -SideBarLinks() - -st.title('Prediction with Regression') - -# create a 2 column layout -col1, col2 = st.columns(2) - -# add one number input for variable 1 into column 1 -with col1: - var_01 = st.number_input('Variable 01:', - step=1) - -# add another number input for variable 2 into column 2 -with col2: - var_02 = st.number_input('Variable 02:', - step=1) - -logger.info(f'var_01 = {var_01}') -logger.info(f'var_02 = {var_02}') - -# add a button to use the values entered into the number field to send to the -# prediction function via the REST API -if st.button('Calculate Prediction', - type='primary', - use_container_width=True): - results = requests.get(f'http://api:4000/c/prediction/{var_01}/{var_02}').json() - st.dataframe(results) - \ No newline at end of file diff --git a/app/src/pages/12_API_Test.py b/app/src/pages/12_API_Test.py deleted file mode 100644 index 74883c5a85..0000000000 --- a/app/src/pages/12_API_Test.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging -logger = logging.getLogger(__name__) -import streamlit as st -import requests -from streamlit_extras.app_logo import add_logo -from modules.nav import SideBarLinks - -SideBarLinks() - -st.write("# Accessing a REST API from Within Streamlit") - -""" -Simply retrieving data from a REST api running in a separate Docker Container. - -If the container isn't running, this will be very unhappy. But the Streamlit app -should not totally die. 
-""" -data = {} -try: - data = requests.get('http://api:4000/data').json() -except: - st.write("**Important**: Could not connect to sample api, so using dummy data.") - data = {"a":{"b": "123", "c": "hello"}, "z": {"b": "456", "c": "goodbye"}} - -st.dataframe(data) diff --git a/app/src/pages/12_Player_Comparison.py b/app/src/pages/12_Player_Comparison.py new file mode 100644 index 0000000000..263ecd40a7 --- /dev/null +++ b/app/src/pages/12_Player_Comparison.py @@ -0,0 +1,240 @@ +# pages/Player_Comparison.py +import os +import logging +import requests +import pandas as pd +import streamlit as st +import plotly.express as px +import plotly.graph_objects as go +from modules.nav import SideBarLinks + +logger = logging.getLogger(__name__) +st.set_page_config(page_title="Player Comparison", layout="wide") +SideBarLinks() + +st.title("🆚 Player Comparison") +st.caption("Compare two players side-by-side, view historical results, and analyze recent box scores.") + +# ------------------------------------------------------------------------------------ +# Config +# ------------------------------------------------------------------------------------ +BASE_URL = os.getenv("API_BASE_URL", "http://api:4000") + +# ------------------------------------------------------------------------------------ +# API helpers +# ------------------------------------------------------------------------------------ +def api_get(path: str, params: dict | None = None): + try: + url = f"{BASE_URL}{path}" + r = requests.get(url, params=params, timeout=25) + if r.status_code in (200, 201): + return r.json() + st.error(f"API {r.status_code}: {r.text}") + except Exception as e: + st.error(f"Connection error: {e}") + return None + +@st.cache_data(ttl=180) +def load_all_players(position=None, team_id=None): + params = {} + if position: params["position"] = position + if team_id: params["team_id"] = team_id + data = api_get("/basketball/players", params) + rows = data["players"] if isinstance(data, dict) and 
"players" in data else (data or []) + df = pd.DataFrame(rows) + # Build a friendly display label + if not df.empty: + df["display"] = df["first_name"].astype(str) + " " + df["last_name"].astype(str) + \ + df.apply(lambda x: f" · {x['position']}" if pd.notna(x.get("position")) else "", axis=1) + return df + +@st.cache_data(ttl=180) +def fetch_player_stats(player_id: int, season: str | None, game_type: str | None): + params = {} + if season: params["season"] = season + if game_type: params["game_type"] = game_type # your route maps to is_playoff internally + resp = api_get(f"/basketball/players/{player_id}/stats", params) + if not resp: return {}, pd.DataFrame() + stats = resp.get("stats", {}) or {} + recent = pd.DataFrame(resp.get("recent_games", []) or []) + # Normalize expected columns (rename if your API uses 'date' vs 'game_date', etc.) + if "game_date" not in recent.columns and "date" in recent.columns: + recent = recent.rename(columns={"date":"game_date"}) + return stats, recent + +# ------------------------------------------------------------------------------------ +# Filters +# ------------------------------------------------------------------------------------ +st.header("Filters") + +fcol1, fcol2, fcol3, fcol4 = st.columns([1.5, 1.5, 1, 1]) +with fcol1: + position_filter = st.selectbox("Position Filter (optional)", + ["", "PG", "SG", "SF", "PF", "C", "Guard", "Forward", "Center"], index=0) + position_filter = position_filter or None +with fcol2: + team_id_text = st.text_input("Team ID Filter (optional)", value="") + team_id_filter = int(team_id_text) if team_id_text.strip().isdigit() else None +with fcol3: + season = st.text_input("Season (optional)", value="", help="e.g., 2024-25 or 2025") + season = season.strip() or None +with fcol4: + game_type = st.selectbox("Game Type", ["All", "regular", "playoff"], index=0) + game_type = None if game_type == "All" else game_type + +st.markdown("") + +# Load player list with optional filters for easier searching 
+players_df = load_all_players(position=position_filter, team_id=team_id_filter) +if players_df.empty: + st.warning("No players available. Adjust filters or load data.") + st.stop() + +# Search boxes for two players +scol1, scol2 = st.columns(2) +with scol1: + search1 = st.text_input("Search Player 1 by name", value="") + df1 = players_df.copy() + if search1.strip(): + mask = df1["display"].str.contains(search1.strip(), case=False, na=False) + df1 = df1[mask] + player1 = st.selectbox("Select Player 1", options=df1["display"].tolist(), index=0 if not df1.empty else None) +with scol2: + search2 = st.text_input("Search Player 2 by name", value="") + df2 = players_df.copy() + if search2.strip(): + mask = df2["display"].str.contains(search2.strip(), case=False, na=False) + df2 = df2[mask] + default_idx = 1 if len(df2) > 1 else 0 + player2 = st.selectbox("Select Player 2", options=df2["display"].tolist(), index=default_idx if not df2.empty else None) + +# Resolve to player_id +def pick_id(df, display): + if df.empty or not display: return None + row = df[df["display"] == display].head(1) + return int(row["player_id"].iloc[0]) if not row.empty else None + +p1_id = pick_id(players_df, player1) +p2_id = pick_id(players_df, player2) + +run = st.button("Compare Players", type="primary") +st.markdown("---") + +if run and p1_id and p2_id: + # Pull stats + recent games (box scores) + p1_stats, p1_recent = fetch_player_stats(p1_id, season, game_type) + p2_stats, p2_recent = fetch_player_stats(p2_id, season, game_type) + + # Basic info cards + c1, c2 = st.columns(2) + def name_for(pid): + row = players_df[players_df["player_id"] == pid] + if row.empty: return "Unknown" + fn = row["first_name"].iloc[0] + ln = row["last_name"].iloc[0] + pos = row.get("position", pd.Series([""])).iloc[0] + team = row.get("current_team", pd.Series([""])).iloc[0] + return f"{fn} {ln} ({pos}) · {team}" + + with c1: + st.subheader("Player 1") + st.info(name_for(p1_id)) + # Show a few headline metrics if 
present + for label, key in [ + ("Games Played", "games_played"), + ("Avg Points", "avg_points"), + ("Avg Rebounds", "avg_rebounds"), + ("Avg Assists", "avg_assists"), + ("Avg Plus/Minus", "avg_plus_minus"), + ("Avg Minutes", "avg_minutes"), + ]: + if key in p1_stats and p1_stats[key] is not None: + st.metric(label, p1_stats[key]) + with c2: + st.subheader("Player 2") + st.info(name_for(p2_id)) + for label, key in [ + ("Games Played", "games_played"), + ("Avg Points", "avg_points"), + ("Avg Rebounds", "avg_rebounds"), + ("Avg Assists", "avg_assists"), + ("Avg Plus/Minus", "avg_plus_minus"), + ("Avg Minutes", "avg_minutes"), + ]: + if key in p2_stats and p2_stats[key] is not None: + st.metric(label, p2_stats[key]) + + st.markdown("---") + + # Radar chart comparison + radar_cols = ["avg_points", "avg_rebounds", "avg_assists", "avg_steals", "avg_blocks", "avg_turnovers", "avg_plus_minus", "avg_minutes", "avg_shooting_pct"] + p1_vals = [float(p1_stats.get(k, 0) or 0) for k in radar_cols] + p2_vals = [float(p2_stats.get(k, 0) or 0) for k in radar_cols] + display_names = [name_for(p1_id), name_for(p2_id)] + + rfig = go.Figure() + rfig.add_trace(go.Scatterpolar(r=p1_vals, theta=radar_cols, fill='toself', name=display_names[0])) + rfig.add_trace(go.Scatterpolar(r=p2_vals, theta=radar_cols, fill='toself', name=display_names[1])) + rfig.update_layout( + polar=dict(radialaxis=dict(visible=True)), + title="Averages Radar" + ) + st.plotly_chart(rfig, use_container_width=True) + + # Historical results / box scores (recent games) + st.subheader("Recent Box Scores (Historical Games)") + ng = st.slider("Number of Recent Games to Show", 5, 25, 10, 5) + + # Normalize/ensure columns for plotting + for df_recent in (p1_recent, p2_recent): + if not df_recent.empty: + # expected: game_id, game_date, home_team, away_team, points, rebounds, assists, minutes_played + # Handle potential column names + if "points" not in df_recent.columns and "PTS" in df_recent.columns: + 
df_recent.rename(columns={"PTS": "points"}, inplace=True) + if "rebounds" not in df_recent.columns and "REB" in df_recent.columns: + df_recent.rename(columns={"REB": "rebounds"}, inplace=True) + if "assists" not in df_recent.columns and "AST" in df_recent.columns: + df_recent.rename(columns={"AST": "assists"}, inplace=True) + + # Line chart of points over time + if not p1_recent.empty or not p2_recent.empty: + lp1 = p1_recent.head(ng).copy() + lp2 = p2_recent.head(ng).copy() + lp1["player"] = display_names[0] + lp2["player"] = display_names[1] + chart_cols = [] + if not lp1.empty: chart_cols.append(lp1) + if not lp2.empty: chart_cols.append(lp2) + if chart_cols: + combined = pd.concat(chart_cols, ignore_index=True) + # Use game_date or fallback to index + if "game_date" in combined.columns: + combined["game_date"] = pd.to_datetime(combined["game_date"], errors="coerce") + x = "game_date" + else: + combined["n"] = combined.groupby("player").cumcount() + 1 + x = "n" + if "points" in combined.columns: + lfig = px.line(combined, x=x, y="points", color="player", markers=True, title="Points in Recent Games") + st.plotly_chart(lfig, use_container_width=True) + + # Side-by-side tables + t1, t2 = st.columns(2) + with t1: + st.markdown(f"**{display_names[0]} — Recent Games (Top {ng})**") + st.dataframe(p1_recent.head(ng), use_container_width=True, hide_index=True) + with t2: + st.markdown(f"**{display_names[1]} — Recent Games (Top {ng})**") + st.dataframe(p2_recent.head(ng), use_container_width=True, hide_index=True) + +else: + st.info("Choose two players above and press **Compare Players** to view stats, trends, and box scores.") + +# Debug +with st.expander("Debug Info"): + st.write({ + "BASE_URL": BASE_URL, + "players_path": "/basketball/players", + "stats_path": "/basketball/players//stats" + }) diff --git a/app/src/pages/13_Classification.py b/app/src/pages/13_Classification.py deleted file mode 100644 index be2535c49d..0000000000 --- 
a/app/src/pages/13_Classification.py +++ /dev/null @@ -1,57 +0,0 @@ -import logging -logger = logging.getLogger(__name__) -import streamlit as st -import pandas as pd -from sklearn import datasets -from sklearn.ensemble import RandomForestClassifier -from streamlit_extras.app_logo import add_logo -from modules.nav import SideBarLinks - -SideBarLinks() - -st.write(""" -# Simple Iris Flower Prediction App - -This example is borrowed from [The Data Professor](https://github.com/dataprofessor/streamlit_freecodecamp/tree/main/app_7_classification_iris) - -This app predicts the **Iris flower** type! -""") - -st.sidebar.header('User Input Parameters') - -def user_input_features(): - sepal_length = st.sidebar.slider('Sepal length', 4.3, 7.9, 5.4) - sepal_width = st.sidebar.slider('Sepal width', 2.0, 4.4, 3.4) - petal_length = st.sidebar.slider('Petal length', 1.0, 6.9, 1.3) - petal_width = st.sidebar.slider('Petal width', 0.1, 2.5, 0.2) - data = {'sepal_length': sepal_length, - 'sepal_width': sepal_width, - 'petal_length': petal_length, - 'petal_width': petal_width} - features = pd.DataFrame(data, index=[0]) - return features - -df = user_input_features() - -st.subheader('User Input parameters') -st.write(df) - -iris = datasets.load_iris() -X = iris.data -Y = iris.target - -clf = RandomForestClassifier() -clf.fit(X, Y) - -prediction = clf.predict(df) -prediction_proba = clf.predict_proba(df) - -st.subheader('Class labels and their corresponding index number') -st.write(iris.target_names) - -st.subheader('Prediction') -st.write(iris.target_names[prediction]) -#st.write(prediction) - -st.subheader('Prediction Probability') -st.write(prediction_proba) \ No newline at end of file diff --git a/app/src/pages/13_Historical_Game_Results.py b/app/src/pages/13_Historical_Game_Results.py new file mode 100644 index 0000000000..da4a553246 --- /dev/null +++ b/app/src/pages/13_Historical_Game_Results.py @@ -0,0 +1,265 @@ +# pages/Game_Search.py +import os +import logging +from datetime 
import date
+import requests
+import pandas as pd
+import streamlit as st
+import plotly.express as px
+from modules.nav import SideBarLinks
+
+# -------------------------------------------------------------------
+# Page setup + Nav
+# -------------------------------------------------------------------
+logger = logging.getLogger(__name__)
+st.set_page_config(page_title="Game Search & Box Scores", layout="wide")
+SideBarLinks()  # your shared sidebar nav/logo/auth
+st.title("📅 Game Search & Box Scores")
+
+# If Streamlit runs in Docker next to the API use http://api:4000
+# If running on your host, set API_BASE_URL=http://localhost:4000
+BASE_URL = os.getenv("API_BASE_URL", "http://api:4000")
+
+# -------------------------------------------------------------------
+# Helpers
+# -------------------------------------------------------------------
+def api_get(path: str, params: dict | None = None):
+    """GET `BASE_URL + path` and return decoded JSON, or None on any failure.
+
+    Non-2xx responses and connection errors are reported via st.error, so
+    callers only need a None check.
+    """
+    try:
+        url = f"{BASE_URL}{path}"
+        r = requests.get(url, params=params, timeout=25)
+        if r.status_code in (200, 201):
+            return r.json()
+        # Fall through to the shared `return None` below after reporting
+        st.error(f"API {r.status_code}: {r.text}")
+    except Exception as e:
+        st.error(f"Connection error: {e}")
+    return None
+
+@st.cache_data(ttl=300)
+def load_teams() -> pd.DataFrame:
+    """Fetch teams for name-based selection."""
+    data = api_get("/basketball/teams", None)
+    # Accept either {"teams": [...]} or a bare list payload
+    rows = data["teams"] if isinstance(data, dict) and "teams" in data else (data or [])
+    df = pd.DataFrame(rows)
+    # Expected columns: team_id, name, abrv, city ...
+ if not df.empty: + df["display"] = df.apply( + lambda x: f"{x.get('name','')}" + (f" ({x.get('abrv','')})" if pd.notna(x.get('abrv')) else ""), + axis=1 + ) + return df + +@st.cache_data(ttl=180) +def search_games(start_date: str | None, end_date: str | None, + season: str | None, game_type: str | None, status: str | None) -> dict | None: + """We do NOT pass team_id here; we'll filter by team name client-side.""" + params = {} + if start_date: params["start_date"] = start_date + if end_date: params["end_date"] = end_date + if season: params["season"] = season + if game_type: params["game_type"] = game_type + if status: params["status"] = status + return api_get("/basketball/games", params) + +@st.cache_data(ttl=180) +def get_game_details(game_id: int) -> dict | None: + return api_get(f"/basketball/games/{game_id}") + +def fmt_game_row(g: dict) -> str: + gd = g.get("game_date") or g.get("date") or "" + gt = g.get("game_time") or "" + home = g.get("home_team_name") or str(g.get("home_team_id")) + away = g.get("away_team_name") or str(g.get("away_team_id")) + score = f"{g.get('away_score','?')} - {g.get('home_score','?')}" + return f"[{g.get('game_id')}] {gd} {gt} — {away} @ {home} ({score})" + +def safe_df(rows) -> pd.DataFrame: + try: + return pd.DataFrame(rows or []) + except Exception: + return pd.DataFrame() + +def filter_games_by_team_names(games: list[dict], names: list[str]) -> list[dict]: + """Filter games where selected team names participate. + If one name selected: include games where home OR away matches. + If two+ names selected: include games where ALL selected appear (typically 2).""" + if not names: + return games + wanted = {n.strip().lower() for n in names if n and n.strip()} + out = [] + for g in games: + home = (g.get("home_team_name") or "").strip().lower() + away = (g.get("away_team_name") or "").strip().lower() + teams_in_game = {home, away} + # If only one name: match either side; if 2+, require all selected present. 
+ if (len(wanted) == 1 and wanted & teams_in_game) or (len(wanted) >= 2 and wanted.issubset(teams_in_game)): + out.append(g) + return out + +# ------------------------------------------------------------------- +# Filters (top) +# ------------------------------------------------------------------- +st.header("Search Filters") + +teams_df = load_teams() +team_options = teams_df["name"].dropna().sort_values().unique().tolist() if not teams_df.empty else [] + +f1, f2, f3 = st.columns([2, 1, 1]) +with f1: + team_names = st.multiselect( + "Team Name(s)", + options=team_options, + help="Pick one team to see all its games, or two teams to see only head-to-head games." + ) +with f2: + sd = st.date_input("Start Date", value=None, min_value=date(2000,1,1), help="YYYY-MM-DD (inclusive)") +with f3: + ed = st.date_input("End Date", value=None, min_value=date(2000,1,1), help="YYYY-MM-DD (inclusive)") + +f4, f5, f6 = st.columns(3) +with f4: + season = st.text_input("Season (optional)", value="", help="e.g., 2024-25 or 2025") + season = season.strip() or None +with f5: + game_type = st.selectbox("Game Type", ["", "regular", "playoff"], index=0) + game_type = game_type or None +with f6: + status = st.selectbox("Status", ["", "scheduled", "in_progress", "completed"], index=0) + status = status or None + +go = st.button("Search Games", type="primary") +st.markdown("---") + +# ------------------------------------------------------------------- +# Results + selection +# ------------------------------------------------------------------- +games_df = pd.DataFrame() +selected_game_id = None + +if go: + start_date = sd.isoformat() if isinstance(sd, date) else None + end_date = ed.isoformat() if isinstance(ed, date) else None + + payload = search_games( + start_date=start_date, + end_date=end_date, + season=season, + game_type=game_type, + status=status + ) + + if not payload or "games" not in payload: + st.warning("No games returned. 
Adjust filters and try again.") + else: + games = payload["games"] or [] + # Filter by chosen team names + games = filter_games_by_team_names(games, team_names) + + games_df = safe_df(games) + + # Quick summary + s = payload.get("summary", {}) + # Recompute totals after filtering by names + total = len(games) + completed = len([g for g in games if g.get("status") == "completed"]) + scheduled = len([g for g in games if g.get("status") == "scheduled"]) + in_prog = len([g for g in games if g.get("status") == "in_progress"]) + + c1, c2, c3, c4 = st.columns(4) + c1.metric("Total", total) + c2.metric("Completed", completed) + c3.metric("Scheduled", scheduled) + c4.metric("In Progress", in_prog) + + if games_df.empty: + st.info("No games match these filters.") + else: + options = [fmt_game_row(g) for g in games] + st.subheader("Matching Games") + sel = st.selectbox("Choose a game", options=options, index=0) + + if sel: + try: + selected_game_id = int(sel.split("]")[0].strip("[")) + except Exception: + selected_game_id = int(games[0]["game_id"]) + + # Display list + show_cols = [ + "game_id", "game_date", "game_time", + "home_team_name", "away_team_name", + "home_score", "away_score", + "season", "game_type", "status", "venue" + ] + present = [c for c in show_cols if c in games_df.columns] + st.dataframe(games_df[present], use_container_width=True, hide_index=True) + +st.markdown("---") + +# ------------------------------------------------------------------- +# Game details + player box scores +# ------------------------------------------------------------------- +if selected_game_id: + details = get_game_details(selected_game_id) + if not details or "game_details" not in details: + st.error("Failed to load game details.") + else: + g = details["game_details"] + home_team = g.get("home_team_name") or str(g.get("home_team_id")) + away_team = g.get("away_team_name") or str(g.get("away_team_id")) + st.header(f"Game #{g.get('game_id')} — {away_team} @ {home_team}") + + # Top 
summary + s1, s2, s3, s4 = st.columns(4) + s1.metric("Date", g.get("game_date", "")) + s2.metric("Time", g.get("game_time", "")) + s3.metric("Score", f"{g.get('away_score','?')} - {g.get('home_score','?')}") + s4.metric("Status", g.get("status","")) + + # Player stats DataFrames + def norm_df(rows): + df = safe_df(rows) + for col in ["points", "rebounds", "assists", "steals", "blocks", "turnovers", "minutes_played"]: + if col not in df.columns: df[col] = None + if "first_name" in df.columns and "last_name" in df.columns and "player_name" not in df.columns: + df["player_name"] = (df["first_name"].astype(str) + " " + df["last_name"].astype(str)).str.strip() + return df + + home_df = norm_df(details.get("home_team_stats")) + away_df = norm_df(details.get("away_team_stats")) + + st.subheader("Player Box Scores") + c_home, c_away = st.columns(2) + with c_home: + st.markdown(f"**{home_team} — Players**") + hcols = ["player_name", "position", "points", "rebounds", "assists", "steals", "blocks", "turnovers", "minutes_played"] + hpresent = [c for c in hcols if c in home_df.columns] + st.dataframe(home_df[hpresent], use_container_width=True, hide_index=True) + with c_away: + st.markdown(f"**{away_team} — Players**") + acols = ["player_name", "position", "points", "rebounds", "assists", "steals", "blocks", "turnovers", "minutes_played"] + apresent = [c for c in acols if c in away_df.columns] + st.dataframe(away_df[apresent], use_container_width=True, hide_index=True) + + # Quick charts: Top scorers + st.subheader("Top Scorers") + top_n = st.slider("Top N", 3, 15, 8) + if not home_df.empty and "points" in home_df.columns: + hchart = home_df.nlargest(top_n, "points")[["player_name", "points"]].copy() + hfig = px.bar(hchart, x="points", y="player_name", orientation="h", title=f"{home_team} — Top {min(top_n, len(hchart))} by Points") + st.plotly_chart(hfig, use_container_width=True) + if not away_df.empty and "points" in away_df.columns: + achart = away_df.nlargest(top_n, 
"points")[["player_name", "points"]].copy() + afig = px.bar(achart, x="points", y="player_name", orientation="h", title=f"{away_team} — Top {min(top_n, len(achart))} by Points") + st.plotly_chart(afig, use_container_width=True) + +else: + st.info("Pick one or two Team Names and set any date/season filters, then press **Search Games**. Select a game to view full box scores.") + +# Debug footer +with st.expander("Debug Info"): + st.write({ + "BASE_URL": BASE_URL, + "teams_path": "/basketball/teams", + "games_path": "/basketball/games", + "game_details_path": "/basketball/games/" + }) diff --git a/app/src/pages/20_Admin_Home.py b/app/src/pages/20_Admin_Home.py deleted file mode 100644 index 0dbd0f36b4..0000000000 --- a/app/src/pages/20_Admin_Home.py +++ /dev/null @@ -1,17 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks -import requests - -st.set_page_config(layout = 'wide') - -SideBarLinks() - -st.title('System Admin Home Page') - -if st.button('Update ML Models', - type='primary', - use_container_width=True): - st.switch_page('pages/21_ML_Model_Mgmt.py') \ No newline at end of file diff --git a/app/src/pages/20_Data_Engineer_Home.py b/app/src/pages/20_Data_Engineer_Home.py new file mode 100644 index 0000000000..357aad02cf --- /dev/null +++ b/app/src/pages/20_Data_Engineer_Home.py @@ -0,0 +1,27 @@ +import logging +logger = logging.getLogger(__name__) +import streamlit as st +from modules.nav import SideBarLinks + +st.set_page_config(layout='wide') +SideBarLinks() + +st.title(f"Welcome Data Engineer, {st.session_state.get('first_name', 'Guest')}.") +st.write('') +st.write('') +st.write('### What would you like to do today?') + +if st.button('Data Pipelines', + type='primary', + use_container_width=True): + st.switch_page('pages/21_Data_Pipelines.py') + +if st.button('System Health', + type='primary', + use_container_width=True): + st.switch_page('pages/22_System_Health.py') + +if 
st.button('Data Logs and Cleanup', + type='primary', + use_container_width=True): + st.switch_page('pages/23_Data_Cleanup.py') \ No newline at end of file diff --git a/app/src/pages/21_Data_Pipelines.py b/app/src/pages/21_Data_Pipelines.py new file mode 100644 index 0000000000..5a17cbfb54 --- /dev/null +++ b/app/src/pages/21_Data_Pipelines.py @@ -0,0 +1,429 @@ +import streamlit as st +import pandas as pd +import requests +from datetime import datetime +import logging +import time +from modules.nav import SideBarLinks + +logger = logging.getLogger(__name__) + +st.set_page_config( + page_title="Data Pipelines - BallWatch", + layout="wide" +) + +SideBarLinks() + +if 'api_base_url' not in st.session_state: + st.session_state.api_base_url = 'http://api:4000/' + +if 'debug_mode' not in st.session_state: + st.session_state.debug_mode = False + +def api_get(endpoint): + full_url = f"{st.session_state.api_base_url}{endpoint}" + try: + if st.session_state.debug_mode: + with st.expander("Debug Info", expanded=False): + st.write(f"**GET Request:** `{full_url}`") + st.write(f"**Timestamp:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + + response = requests.get(full_url, timeout=10) + + if response.status_code == 200: + try: + json_data = response.json() + return json_data + except ValueError: + logger.error(f"Invalid JSON response from {endpoint}") + return None + else: + logger.error(f"API Error {response.status_code} for {endpoint}: {response.text[:200]}") + + if response.status_code == 404: + st.error(f"Endpoint not found: {endpoint}") + elif response.status_code == 500: + try: + error_data = response.json() + error_msg = error_data.get('error', 'Server error occurred') + st.error(f"Server Error: {error_msg}") + except: + st.error("Server error occurred. Please try again later.") + elif response.status_code == 403: + st.error("Access denied. Please check your permissions.") + elif response.status_code == 400: + st.error("Invalid request. 
Please check your input.") + + return None + + except requests.exceptions.ConnectionError: + logger.error(f"Connection error for {endpoint}") + st.error("Cannot connect to server. Please check if the API is running.") + return None + + except requests.exceptions.Timeout: + logger.error(f"Timeout error for {endpoint}") + st.error("Request timed out. Please try again.") + return None + + except Exception as e: + logger.error(f"Unexpected error for {endpoint}: {str(e)}") + st.error("An unexpected error occurred. Please try again.") + return None + +def api_post(endpoint, data): + full_url = f"{st.session_state.api_base_url}{endpoint}" + try: + if st.session_state.debug_mode: + with st.expander("Debug Info", expanded=False): + st.write(f"**POST Request:** `{full_url}`") + st.write(f"**Payload:** {data}") + st.write(f"**Timestamp:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + + response = requests.post( + full_url, + json=data, + headers={'Content-Type': 'application/json'}, + timeout=10 + ) + + if response.status_code in [200, 201]: + try: + json_data = response.json() + return json_data + except ValueError: + logger.error(f"Invalid JSON response from {endpoint}") + return None + else: + logger.error(f"API Error {response.status_code} for {endpoint}: {response.text[:200]}") + + if response.status_code == 404: + st.error(f"Endpoint not found: {endpoint}") + elif response.status_code == 500: + st.error("Server error occurred. Please check the backend logs.") + elif response.status_code == 400: + st.error("Invalid request data. Please check your input.") + elif response.status_code == 409: + st.error("A data load of this type is already running.") + + return None + + except requests.exceptions.ConnectionError: + logger.error(f"Connection error for {endpoint}") + st.error("Cannot connect to server. Please check if the API is running.") + return None + + except requests.exceptions.Timeout: + logger.error(f"Timeout error for {endpoint}") + st.error("Request timed out. 
Please try again.") + return None + + except Exception as e: + logger.error(f"Unexpected error for {endpoint}: {str(e)}") + st.error("An unexpected error occurred. Please try again.") + return None + +def api_put(endpoint, data): + full_url = f"{st.session_state.api_base_url}{endpoint}" + try: + if st.session_state.debug_mode: + with st.expander("Debug Info", expanded=False): + st.write(f"**PUT Request:** `{full_url}`") + st.write(f"**Payload:** {data}") + st.write(f"**Timestamp:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + + response = requests.put( + full_url, + json=data, + headers={'Content-Type': 'application/json'}, + timeout=10 + ) + + if response.status_code == 200: + try: + json_data = response.json() + return json_data + except ValueError: + logger.error(f"Invalid JSON response from {endpoint}") + return None + else: + logger.error(f"API Error {response.status_code} for {endpoint}: {response.text[:200]}") + + if response.status_code == 404: + st.error("Resource not found.") + elif response.status_code == 500: + st.error("Server error occurred. 
Please try again.") + elif response.status_code == 400: + st.error("Invalid update data.") + + return None + + except requests.exceptions.ConnectionError: + logger.error(f"Connection error for {endpoint}") + st.error("Cannot connect to server.") + return None + + except Exception as e: + logger.error(f"Unexpected error for {endpoint}: {str(e)}") + st.error("An unexpected error occurred.") + return None + +# Main Page +st.title("Data Pipelines") +st.markdown("Manage and monitor data loads for BallWatch") + +# Add debug mode toggle in sidebar +with st.sidebar: + st.divider() + st.session_state.debug_mode = st.checkbox("Debug Mode", value=st.session_state.debug_mode) + if st.session_state.debug_mode: + st.info("Debug mode enabled - verbose error details will be shown") + +# Quick Actions Section +st.subheader("Start New Data Load") +col1, col2, col3 = st.columns(3) + +with col1: + if st.button("Load Player Stats", type="primary", use_container_width=True): + with st.spinner("Starting player stats load..."): + result = api_post('/system/data-loads', { + 'load_type': 'player_stats', + 'initiated_by': 'Mike Lewis' + }) + if result: + st.success(f"Load started successfully! Load ID: {result.get('load_id')}") + st.rerun() + +with col2: + if st.button("Load Game Data", type="primary", use_container_width=True): + with st.spinner("Starting game data load..."): + result = api_post('/system/data-loads', { + 'load_type': 'game_data', + 'initiated_by': 'Mike Lewis' + }) + if result: + st.success(f"Load started successfully! Load ID: {result.get('load_id')}") + st.rerun() + +with col3: + if st.button("Load Team Data", type="primary", use_container_width=True): + with st.spinner("Starting team data load..."): + result = api_post('/system/data-loads', { + 'load_type': 'team_data', + 'initiated_by': 'Mike Lewis' + }) + if result: + st.success(f"Load started successfully! 
Load ID: {result.get('load_id')}") + st.rerun() + +st.divider() + +# Data Loads History Section +st.subheader("Data Load History") + +# Filters +col1, col2, col3, col4 = st.columns([2, 1, 1, 1]) +with col1: + search_term = st.text_input("Search loads", placeholder="Filter by load type or user...") +with col2: + status_filter = st.selectbox("Status", ["All", "pending", "running", "completed", "failed"]) +with col3: + days_filter = st.number_input("Days back", min_value=1, max_value=365, value=7) +with col4: + if st.button("Refresh", use_container_width=True): + st.rerun() + +# Fetch data loads +with st.spinner("Loading data..."): + endpoint = f'/system/data-loads?days={days_filter}' + if status_filter != "All": + endpoint += f'&status={status_filter}' + + loads_data = api_get(endpoint) + +if loads_data: + loads = loads_data.get('loads', []) + + if st.session_state.debug_mode: + with st.expander("Debug: Raw API Response"): + st.json(loads_data) + + if loads: + df = pd.DataFrame(loads) + + # Apply search filter + if search_term: + mask = df.apply(lambda x: search_term.lower() in str(x).lower(), axis=1) + df = df[mask] + + if not df.empty: + # Convert datetime strings to more readable format + if 'started_at' in df.columns: + df['started_at'] = pd.to_datetime(df['started_at']).dt.strftime('%Y-%m-%d %H:%M') + if 'completed_at' in df.columns: + df['completed_at'] = pd.to_datetime(df['completed_at']).dt.strftime('%Y-%m-%d %H:%M') + + # Add status indicators + df['status_display'] = df['status'].apply( + lambda x: f"COMPLETED" if x == 'completed' + else f"FAILED" if x == 'failed' + else f"RUNNING" if x == 'running' + else f"PENDING" + ) + + # Display table with better column configuration + st.dataframe( + df[['load_id', 'load_type', 'status_display', 'started_at', 'completed_at', + 'records_processed', 'records_failed', 'initiated_by']], + column_config={ + "load_id": st.column_config.NumberColumn("Load ID", width="small"), + "load_type": "Type", + "status_display": 
"Status", + "started_at": "Started", + "completed_at": "Completed", + "records_processed": st.column_config.NumberColumn("Processed", format="%d"), + "records_failed": st.column_config.NumberColumn("Failed", format="%d"), + "initiated_by": "Initiated By" + }, + use_container_width=True, + hide_index=True + ) + + # Handle running loads with auto-refresh + running_loads = df[df['status'] == 'running'] + if not running_loads.empty: + st.info(f"{len(running_loads)} load(s) currently running. Page will auto-refresh.") + time.sleep(2) + st.rerun() + + # Summary metrics + st.divider() + col1, col2, col3, col4 = st.columns(4) + + with col1: + st.metric("Total Loads", len(df)) + with col2: + completed_loads = len(df[df['status'] == 'completed']) + st.metric("Completed", completed_loads) + with col3: + failed_loads = len(df[df['status'] == 'failed']) + st.metric("Failed", failed_loads, delta=f"-{failed_loads}" if failed_loads > 0 else None) + with col4: + total_processed = df['records_processed'].sum() if 'records_processed' in df.columns else 0 + st.metric("Total Records", f"{total_processed:,}") + + else: + st.info(f"No data loads found matching your filters.") + else: + st.info("No data loads found in the database. Start a new load using the buttons above.") + + # Show what's in the response for debugging + if st.session_state.debug_mode: + st.write("API Response keys:", list(loads_data.keys())) +else: + # Show a simple warning without all the debug details + st.warning("Unable to connect to the API server") + + # Add a helpful expander with troubleshooting steps + with st.expander("Troubleshooting Steps"): + st.markdown(""" + 1. **Check if the API server is running** + - For Docker: `docker-compose ps` + - For local: Check the terminal running Flask + + 2. 
**Verify the DataLoads table exists** + ```sql + CREATE TABLE IF NOT EXISTS DataLoads ( + load_id INT AUTO_INCREMENT PRIMARY KEY, + load_type VARCHAR(50) NOT NULL, + status ENUM('pending', 'running', 'completed', 'failed') DEFAULT 'pending', + started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + completed_at TIMESTAMP NULL, + records_processed INT DEFAULT 0, + records_failed INT DEFAULT 0, + error_message TEXT, + source_file VARCHAR(255), + initiated_by VARCHAR(100) + ); + ``` + + 3. **Check Flask logs for errors** + - Docker: `docker-compose logs api` + - Local: Check Flask terminal output + """) + + # Show demo data + st.info("Showing demo data for visualization purposes") + + mock_data = [ + { + 'load_id': 1, + 'load_type': 'player_stats', + 'status': 'completed', + 'started_at': '2025-01-25 02:00:00', + 'completed_at': '2025-01-25 02:15:30', + 'records_processed': 450, + 'records_failed': 0, + 'initiated_by': 'system' + }, + { + 'load_id': 2, + 'load_type': 'game_data', + 'status': 'running', + 'started_at': '2025-01-25 14:30:00', + 'completed_at': None, + 'records_processed': 234, + 'records_failed': 0, + 'initiated_by': 'Mike Lewis' + } + ] + + df = pd.DataFrame(mock_data) + df['status_display'] = df['status'].apply( + lambda x: f"COMPLETED" if x == 'completed' else f"RUNNING" + ) + + st.dataframe( + df[['load_id', 'load_type', 'status_display', 'started_at', 'completed_at', + 'records_processed', 'records_failed', 'initiated_by']], + use_container_width=True, + hide_index=True + ) + +st.divider() + +# System Health Check +st.subheader("System Health Status") + +with st.spinner("Checking system health..."): + health_data = api_get('/system/health') + +if health_data: + col1, col2, col3, col4 = st.columns(4) + + with col1: + status = health_data.get('status', 'unknown') + st.metric("System Status", status.upper()) + + with col2: + db_status = health_data.get('database_status', 'unknown') + st.metric("Database", db_status.upper()) + + with col3: + errors = 
health_data.get('recent_errors_24h', 0) + st.metric("Errors (24h)", errors) + + with col4: + active_loads = health_data.get('active_data_loads', 0) + st.metric("Active Loads", active_loads) +else: + col1, col2, col3, col4 = st.columns(4) + with col1: + st.metric("System Status", "OFFLINE") + with col2: + st.metric("Database", "UNKNOWN") + with col3: + st.metric("Errors (24h)", "N/A") + with col4: + st.metric("Active Loads", "N/A") \ No newline at end of file diff --git a/app/src/pages/21_ML_Model_Mgmt.py b/app/src/pages/21_ML_Model_Mgmt.py deleted file mode 100644 index 148978c24b..0000000000 --- a/app/src/pages/21_ML_Model_Mgmt.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging -logger = logging.getLogger(__name__) -import streamlit as st -from modules.nav import SideBarLinks -import requests - -st.set_page_config(layout = 'wide') - -SideBarLinks() - -st.title('App Administration Page') - -st.write('\n\n') -st.write('## Model 1 Maintenance') - -st.button("Train Model 01", - type = 'primary', - use_container_width=True) - -st.button('Test Model 01', - type = 'primary', - use_container_width=True) - -if st.button('Model 1 - get predicted value for 10, 25', - type = 'primary', - use_container_width=True): - results = requests.get('http://api:4000/c/prediction/10/25').json() - st.dataframe(results) diff --git a/app/src/pages/22_System_Health.py b/app/src/pages/22_System_Health.py new file mode 100644 index 0000000000..d1d4283816 --- /dev/null +++ b/app/src/pages/22_System_Health.py @@ -0,0 +1,403 @@ +import streamlit as st +import pandas as pd +import requests +from datetime import datetime +import time +import logging +from modules.nav import SideBarLinks + +logger = logging.getLogger(__name__) + +st.set_page_config( + page_title="System Health - BallWatch", + layout="wide" +) + +SideBarLinks() + +if 'api_base_url' not in st.session_state: + st.session_state.api_base_url = 'http://api:4000' +if 'auto_refresh' not in st.session_state: + st.session_state.auto_refresh = 
def _api_request(endpoint, send, ok_statuses):
    """Shared request core for api_get / api_post.

    Builds the full URL from the session-state base URL, performs the
    call via `send`, and funnels every failure mode through identical
    logging + Streamlit error reporting (previously duplicated verbatim
    in both public helpers).

    Args:
        endpoint: Path appended to `st.session_state.api_base_url`.
        send: Callable taking the full URL and returning a Response.
        ok_statuses: Status codes treated as success.

    Returns:
        Decoded JSON body on success, otherwise None.
    """
    try:
        full_url = f"{st.session_state.api_base_url}{endpoint}"
        response = send(full_url)
        if response.status_code in ok_statuses:
            return response.json()
        logger.error(f"API Error {response.status_code} for {endpoint}")
        st.error(f"API Error: {response.status_code}")
        return None
    except requests.exceptions.ConnectionError:
        logger.error(f"Connection error for {endpoint}")
        st.error("Cannot connect to server")
        return None
    except Exception as e:
        logger.error(f"Error for {endpoint}: {str(e)}")
        st.error(f"Error: {str(e)}")
        return None


def api_get(endpoint):
    """GET `endpoint`; return parsed JSON or None on any failure."""
    return _api_request(
        endpoint,
        lambda url: requests.get(url, timeout=10),
        ok_statuses=(200,),
    )


def api_post(endpoint, data):
    """POST `data` as JSON to `endpoint`; return parsed JSON or None on failure."""
    return _api_request(
        endpoint,
        lambda url: requests.post(
            url,
            json=data,
            headers={'Content-Type': 'application/json'},
            timeout=10,
        ),
        ok_statuses=(200, 201),
    )
'operational': + st.success("OPERATIONAL") + st.caption("All systems running normally") + elif status == 'degraded': + st.warning("DEGRADED") + st.caption("Some issues detected") + else: + st.error("CRITICAL") + st.caption("System issues present") + + with col2: + db_status = health_data.get('database_status', 'unknown') + if db_status == 'healthy': + st.success("DATABASE HEALTHY") + else: + st.error("DATABASE ISSUES") + st.caption("Database connectivity") + + with col3: + errors_24h = health_data.get('recent_errors_24h', 0) + if errors_24h == 0: + st.success("0 ERRORS") + elif errors_24h < 10: + st.warning(f"{errors_24h} ERRORS") + else: + st.error(f"{errors_24h} ERRORS") + st.caption("Last 24 hours") + + with col4: + active_loads = health_data.get('active_data_loads', 0) + if active_loads == 0: + st.info("0 ACTIVE LOADS") + else: + st.info(f"{active_loads} ACTIVE LOADS") + st.caption("Currently running") + + if health_data.get('health_check_timestamp'): + st.caption(f"Last updated: {health_data['health_check_timestamp']}") + + st.divider() + + st.subheader("System Metrics") + + metrics = health_data.get('system_metrics', {}) + if metrics: + col1, col2, col3, col4 = st.columns(4) + + with col1: + st.metric("Total Players", f"{metrics.get('total_players', 0):,}") + with col2: + st.metric("Total Teams", metrics.get('total_teams', 0)) + with col3: + st.metric("Total Games", f"{metrics.get('total_games', 0):,}") + with col4: + st.metric("Total Users", metrics.get('total_users', 0)) + + last_load = health_data.get('last_successful_load') + if last_load: + st.success(f"Last successful load: **{last_load.get('load_type', 'Unknown')}** (ID: {last_load.get('load_id', 'N/A')})") + st.caption(f"Completed at: {last_load.get('completed_at', 'Unknown')}") + +else: + st.warning("Unable to connect to system health API. 
Showing mock data for demonstration.") + + col1, col2, col3, col4 = st.columns(4) + + with col1: + st.success("OPERATIONAL") + st.caption("All systems running normally") + + with col2: + st.success("DATABASE HEALTHY") + st.caption("Database connectivity") + + with col3: + st.warning("3 ERRORS") + st.caption("Last 24 hours") + + with col4: + st.info("1 ACTIVE LOADS") + st.caption("Currently running") + +st.divider() + +st.subheader("Recent Error Logs") + +col1, col2, col3 = st.columns([2, 1, 1]) +with col1: + severity_filter = st.selectbox("Severity", ["All", "critical", "error", "warning", "info"]) +with col2: + days_back = st.number_input("Days back", min_value=1, max_value=30, value=7) +with col3: + resolved_filter = st.selectbox("Status", ["All", "Resolved", "Unresolved"]) + +params = [] +endpoint = f"/system/error-logs?days={days_back}" +if severity_filter != "All": + endpoint += f"&severity={severity_filter}" +if resolved_filter == "Resolved": + endpoint += "&resolved=true" +elif resolved_filter == "Unresolved": + endpoint += "&resolved=false" + +error_data = api_get(endpoint) + +if error_data: + errors = error_data.get('error_logs', []) + + if errors: + df_errors = pd.DataFrame(errors) + + df_errors['severity_display'] = df_errors['severity'].apply( + lambda x: f"CRITICAL" if x == 'critical' + else f"ERROR" if x == 'error' + else f"WARNING" if x == 'warning' + else f"INFO" + ) + + df_errors['status'] = df_errors['resolved_at'].apply( + lambda x: "RESOLVED" if pd.notna(x) else "PENDING" + ) + + if 'created_at' in df_errors.columns: + df_errors['created_at'] = pd.to_datetime(df_errors['created_at']).dt.strftime('%Y-%m-%d %H:%M') + + st.dataframe( + df_errors[['error_id', 'error_type', 'severity_display', 'module', + 'error_message', 'created_at', 'status']], + column_config={ + "error_id": st.column_config.NumberColumn("ID", width="small"), + "error_type": "Type", + "severity_display": "Severity", + "module": "Module", + "error_message": "Message", + 
"created_at": "Time", + "status": "Status" + }, + use_container_width=True, + hide_index=True + ) + + st.divider() + col1, col2, col3, col4 = st.columns(4) + + with col1: + critical_count = len(df_errors[df_errors['severity'] == 'critical']) + st.metric("Critical", critical_count, + delta=f"{critical_count} critical" if critical_count > 0 else None, + delta_color="inverse") + + with col2: + unresolved = len(df_errors[df_errors['resolved_at'].isna()]) + st.metric("Unresolved", unresolved, + delta=f"{unresolved} pending" if unresolved > 0 else None, + delta_color="inverse") + + with col3: + resolved = len(df_errors[df_errors['resolved_at'].notna()]) + st.metric("Resolved", resolved) + + with col4: + if len(df_errors) > 0: + resolution_rate = (resolved / len(df_errors)) * 100 + st.metric("Resolution Rate", f"{resolution_rate:.1f}%") + else: + st.success("No errors found in the specified time period.") + +else: + st.warning("Unable to fetch error logs. Showing sample data.") + + mock_errors = [ + { + 'error_id': 1, + 'error_type': 'DataQuality', + 'severity': 'warning', + 'module': 'PlayerStats', + 'error_message': 'Invalid shooting percentage detected', + 'created_at': '2025-01-25 13:45:00', + 'resolved_at': None + }, + { + 'error_id': 2, + 'error_type': 'APITimeout', + 'severity': 'error', + 'module': 'DataIngestion', + 'error_message': 'NBA API request timeout', + 'created_at': '2025-01-25 12:30:00', + 'resolved_at': '2025-01-25 12:35:00' + } + ] + + df_mock = pd.DataFrame(mock_errors) + df_mock['severity_display'] = df_mock['severity'].apply( + lambda x: f"WARNING" if x == 'warning' else f"ERROR" + ) + df_mock['status'] = df_mock['resolved_at'].apply( + lambda x: "RESOLVED" if pd.notna(x) else "PENDING" + ) + + st.dataframe( + df_mock[['error_id', 'error_type', 'severity_display', 'module', + 'error_message', 'created_at', 'status']], + use_container_width=True, + hide_index=True + ) + +st.divider() + +st.subheader("Data Validation") + +col1, col2 = st.columns(2) + 
+with col1: + st.write("**Run Data Validation Check**") + validation_type = st.selectbox("Validation Type", + ["integrity_check", "duplicate_check", "null_check"]) + table_name = st.selectbox("Table", ["Players", "Teams", "Game", "PlayerGameStats"]) + + if st.button("Run Validation", type="primary"): + with st.spinner("Running validation check..."): + result = api_post('/system/data-validation', { + 'validation_type': validation_type, + 'table_name': table_name, + 'run_by': 'Mike Lewis' + }) + + if result: + results = result.get('results', {}) + status = results.get('status', 'unknown') + total_records = results.get('total_records', 0) + valid_records = results.get('valid_records', 0) + invalid_records = results.get('invalid_records', 0) + validity_percentage = results.get('validity_percentage', 0) + + if status == 'passed': + st.success(f"Validation passed! {valid_records}/{total_records} records are valid ({validity_percentage}%)") + elif status == 'warning': + st.warning(f"Validation passed with warnings. {invalid_records} issues found in {total_records} records") + elif status == 'failed': + st.error(f"Validation failed. 
{invalid_records} issues found in {total_records} records") + else: + st.info(f"Validation completed with status: {status}") + else: + st.error("Failed to run validation") + +with col2: + st.write("**Recent Validation Results**") + validation_data = api_get('/system/data-validation?days=7') + + if validation_data: + reports = validation_data.get('validation_reports', []) + if reports: + for report in reports[-3:]: + status = report.get('status', 'unknown') + table_name = report.get('table_name', 'Unknown') + validation_type = report.get('validation_type', 'Unknown') + + if status == 'passed': + st.success(f"{table_name} - {validation_type}") + elif status == 'warning': + st.warning(f"{table_name} - {validation_type}") + else: + st.error(f"{table_name} - {validation_type}") + st.caption(f"Run on {report.get('run_date', 'Unknown')}") + else: + st.info("No recent validation reports") + else: + st.info("Validation data unavailable") + +st.divider() + +st.subheader("System Recommendations") + +recommendations = [] + +if health_data: + error_count = health_data.get('recent_errors_24h', 0) + active_loads = health_data.get('active_data_loads', 0) + db_status = health_data.get('database_status', 'unknown') + + if error_count > 10: + recommendations.append(("error", "High error rate detected. Review error logs for patterns.")) + elif error_count > 5: + recommendations.append(("warning", "Moderate error activity. Monitor for trends.")) + else: + recommendations.append(("success", "Error rate within normal parameters")) + + if active_loads > 3: + recommendations.append(("warning", "Multiple concurrent loads detected. Monitor system resources.")) + else: + recommendations.append(("success", "Data load capacity normal")) + + if db_status == 'healthy': + recommendations.append(("success", "Database connection stable")) + else: + recommendations.append(("error", "Database issues detected. 
def make_request(endpoint, method='GET', data=None, timeout=10):
    """Call the BallWatch API and return the decoded JSON payload.

    Args:
        endpoint: Path appended to BASE_URL (e.g. "/system/data-errors").
        method: One of 'GET', 'POST', 'PUT', 'DELETE'.
        data: JSON-serializable body for POST/PUT requests.
        timeout: Seconds before the request is aborted (new, defaulted —
            previously no timeout was set, so a stalled backend could
            hang the Streamlit page indefinitely).

    Returns:
        Parsed JSON on a 200/201 response; None on any error (the error
        is surfaced to the user via st.error).
    """
    try:
        url = f"{BASE_URL}{endpoint}"
        if method == 'GET':
            response = requests.get(url, timeout=timeout)
        elif method == 'POST':
            response = requests.post(url, json=data, timeout=timeout)
        elif method == 'PUT':
            response = requests.put(url, json=data, timeout=timeout)
        elif method == 'DELETE':
            response = requests.delete(url, timeout=timeout)
        else:
            # Bug fix: an unsupported verb previously fell through with
            # `response` unbound, raising UnboundLocalError that the broad
            # except below misreported as a "Connection Error".
            st.error(f"Unsupported HTTP method: {method}")
            return None

        if response.status_code in [200, 201]:
            return response.json()
        else:
            st.error(f"API Error: {response.status_code} - {response.text}")
            return None
    except Exception as e:
        st.error(f"Connection Error: {str(e)}")
        return None
st.button("Load Data Errors", key="load_data_errors"): + params = f"?days={days_filter}" + if error_type_filter != "All": + params += f"&error_type={error_type_filter}" + if table_filter: + params += f"&table_name={table_filter}" + + endpoint = f"/system/data-errors{params}" + + data = make_request(endpoint) + + if data and 'errors' in data: + st.session_state['data_errors'] = data['errors'] + st.success(f"Found {len(data['errors'])} data errors") + + if 'data_errors' in st.session_state: + errors = st.session_state['data_errors'] + + if errors: + for error in errors: + with st.expander(f"Error #{error['data_error_id']} - {error['error_type']} in {error['table_name']}"): + col1, col2 = st.columns(2) + + with col1: + st.write(f"**Record ID:** {error['record_id']}") + st.write(f"**Field:** {error['field_name']}") + st.write(f"**Invalid Value:** {error['invalid_value']}") + st.write(f"**Expected Format:** {error['expected_format']}") + + with col2: + st.write(f"**Detected:** {error['detected_at']}") + st.write(f"**Resolved:** {error.get('resolved_at', 'Not resolved')}") + st.write(f"**Auto-fixed:** {'Yes' if error.get('auto_fixed') else 'No'}") + else: + st.info("No data errors found for the selected criteria") + +with tab2: + st.header("Data Cleanup Management") + + cleanup_tab1, cleanup_tab2 = st.tabs(["View Schedule", "Schedule Cleanup"]) + + with cleanup_tab1: + st.subheader("Current Cleanup Schedule") + + if st.button("Load Cleanup Schedule", key="load_cleanup"): + data = make_request("/system/data-cleanup") + + if data: + st.session_state['active_schedules'] = data.get('active_schedules', []) + st.session_state['cleanup_history'] = data.get('recent_history', []) + st.success("Cleanup data loaded successfully") + + if 'active_schedules' in st.session_state: + schedules = st.session_state['active_schedules'] + + if schedules: + st.write("**Active Cleanup Schedules:**") + for schedule in schedules: + with st.expander(f"{schedule['cleanup_type']} - 
{schedule['frequency']}"): + col1, col2 = st.columns(2) + + with col1: + st.write(f"**Schedule ID:** {schedule['schedule_id']}") + st.write(f"**Frequency:** {schedule['frequency']}") + st.write(f"**Retention Days:** {schedule['retention_days']}") + + with col2: + st.write(f"**Next Run:** {schedule['next_run']}") + st.write(f"**Last Run:** {schedule.get('last_run', 'Never')}") + st.write(f"**Created By:** {schedule['created_by']}") + + if 'cleanup_history' in st.session_state: + history = st.session_state['cleanup_history'] + + if history: + st.write("**Recent Cleanup History:**") + for item in history[:5]: + status_text = "COMPLETED" if item['status'] == 'completed' else "FAILED" + st.write(f"[{status_text}] {item['cleanup_type']} - {item['started_at']} - {item.get('records_deleted', 0)} records deleted") + + with cleanup_tab2: + st.subheader("Schedule New Cleanup") + + with st.form("schedule_cleanup_form"): + cleanup_type = st.text_input("Cleanup Type*", placeholder="e.g., old_logs, temp_files") + frequency = st.selectbox("Frequency*", ["daily", "weekly", "monthly"]) + retention_days = st.number_input("Retention Days*", min_value=1, max_value=365, value=30, + help="How many days of data to keep") + next_run_date = st.date_input("Next Run Date", value=datetime.now().date() + timedelta(days=1)) + created_by = st.text_input("Created By*", placeholder="Your username") + + if st.form_submit_button("Schedule Cleanup"): + if cleanup_type and frequency and retention_days and created_by: + next_run_datetime = datetime.combine(next_run_date, datetime.min.time()) + + cleanup_data = { + "cleanup_type": cleanup_type, + "frequency": frequency, + "retention_days": retention_days, + "next_run": next_run_datetime.isoformat(), + "created_by": created_by + } + + result = make_request("/system/data-cleanup", method='POST', data=cleanup_data) + + if result: + st.success(f"Cleanup scheduled successfully! 
Schedule ID: {result.get('schedule_id')}") + st.rerun() + else: + st.error("Please fill in all required fields marked with *") \ No newline at end of file diff --git a/app/src/pages/30_About.py b/app/src/pages/30_About.py deleted file mode 100644 index 07a2e9aab2..0000000000 --- a/app/src/pages/30_About.py +++ /dev/null @@ -1,18 +0,0 @@ -import streamlit as st -from streamlit_extras.app_logo import add_logo -from modules.nav import SideBarLinks - -SideBarLinks() - -st.write("# About this App") - -st.markdown ( - """ - This is a demo app for CS 3200 Course Project. - - The goal of this demo is to provide information on the tech stack - being used as well as demo some of the features of the various platforms. - - Stay tuned for more information and features to come! - """ - ) diff --git a/app/src/pages/30_Head_Coach_Home.py b/app/src/pages/30_Head_Coach_Home.py new file mode 100644 index 0000000000..d47442ba67 --- /dev/null +++ b/app/src/pages/30_Head_Coach_Home.py @@ -0,0 +1,560 @@ +################################################## +# BallWatch Basketball Analytics - Head Coach Dashboard +# Main dashboard for Head Coach persona (Marcus Thompson) +# +# User Stories Supported: +# - Marcus-3.1: View upcoming opponent analysis and scouting reports +# - Marcus-3.2: Analyze player matchups for strategic planning +# - Marcus-3.3: Review team roster and player status +# - Marcus-3.4: Evaluate lineup effectiveness and rotations +# - Marcus-3.5: Create and manage game plans +# - Marcus-3.6: Track season progress and team goals +################################################## + +import logging +import streamlit as st +import requests +import pandas as pd +from datetime import datetime, timedelta +from modules.nav import SideBarLinks + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Page configuration +st.set_page_config( + page_title="BallWatch - Head Coach Dashboard", + layout="wide", + 
class APIClient:
    """Centralized HTTP client for the BallWatch backend.

    Every call is routed through uniform logging; all failure modes
    (bad verb, timeout, connection loss, non-2xx status, malformed JSON)
    collapse to a None return so callers can fall back to demo data.
    """

    @staticmethod
    def make_request(endpoint, method='GET', params=None, data=None, timeout=10):
        """Issue one HTTP request; return decoded JSON on 200/201, else None."""
        try:
            url = f"{BASE_URL}{endpoint}"

            # Dispatch on the verb; anything else is rejected up front.
            if method == 'GET':
                resp = requests.get(url, params=params, timeout=timeout)
            elif method == 'POST':
                resp = requests.post(url, json=data, timeout=timeout)
            elif method == 'PUT':
                resp = requests.put(url, json=data, timeout=timeout)
            elif method == 'DELETE':
                resp = requests.delete(url, timeout=timeout)
            else:
                logger.error(f"Unsupported HTTP method: {method}")
                return None

            code = resp.status_code
            if code in (200, 201):
                try:
                    return resp.json()
                except ValueError as e:
                    # Body was not valid JSON despite a success status.
                    logger.error(f"Invalid JSON response: {e}")
                    return None
            if code == 404:
                logger.warning(f"Resource not found: {endpoint}")
                return None
            if code >= 500:
                logger.error(f"Server error {code} for {endpoint}")
                return None
            logger.warning(f"API returned status {code} for {endpoint}")
            return None

        except requests.exceptions.Timeout:
            logger.error(f"Request timeout for {endpoint}")
            return None
        except requests.exceptions.ConnectionError:
            logger.error(f"Connection error for {endpoint}")
            return None
        except Exception as e:
            logger.error(f"Unexpected error for {endpoint}: {e}")
            return None

    @staticmethod
    def safe_get(dictionary, *keys, default=None):
        """Walk nested mappings by successive keys; return `default` on any miss."""
        node = dictionary
        for key in keys:
            try:
                node = node[key]
            except (KeyError, TypeError, AttributeError):
                return default
        return node
+ col1, col2 = st.columns(2) + + with col1: + st.markdown("#### 🚨 Alerts") + + alerts = [] + + # Generate alerts based on data + if season_data: + summary = api.safe_get(season_data, 'summary') + if summary: + win_pct = api.safe_get(summary, 'wins', 0) / max(api.safe_get(summary, 'games_played', 1), 1) + if win_pct < 0.500: + alerts.append(("warning", f"Team record below .500 ({win_pct:.1%})")) + + avg_points = api.safe_get(summary, 'avg_points_scored', 0) + if avg_points > 0 and avg_points < 110: + alerts.append(("error", f"Low offensive output: {avg_points:.1f} PPG")) + + if roster_data: + injured_count = len([p for p in api.safe_get(roster_data, 'players', []) + if api.safe_get(p, 'status') == 'injured']) + if injured_count > 0: + alerts.append(("warning", f"{injured_count} player(s) on injury report")) + + # Default alerts if no data + if not alerts: + alerts = [ + ("info", "Next game preparation needed"), + ("warning", "Review bench rotation effectiveness"), + ("info", "Opponent scouting report available") + ] + + for alert_type, message in alerts[:5]: # Limit to 5 alerts + if alert_type == "error": + st.error(f"🔴 {message}") + elif alert_type == "warning": + st.warning(f"🟡 {message}") + else: + st.info(f"🔵 {message}") + + with col2: + st.markdown("#### ✅ Action Items") + + # Time-based action items + current_hour = datetime.now().hour + if current_hour < 12: # Morning + action_items = [ + "Review film from last game", + "Prepare today's practice plan", + "Check injury status updates", + "Review opponent scouting report" + ] + elif current_hour < 18: # Afternoon + action_items = [ + "Conduct team practice session", + "Hold individual player meetings", + "Finalize rotation adjustments", + "Coordinate with training staff" + ] + else: # Evening + action_items = [ + "Finalize tomorrow's game plan", + "Review starting lineup", + "Prepare media talking points", + "Update coaching staff notes" + ] + + for item in action_items: + st.markdown(f"• {item}") + + +def 
def render_team_snapshot():
    """Render the current team performance snapshot (record, PPG, roster size).

    Pulls roster and season-summary data from the API; falls back to static
    demo numbers when either call fails.
    """
    st.subheader("📊 Team Snapshot")

    # Load data
    roster_data = load_team_roster(DEFAULT_TEAM_ID)
    season_data = load_season_summary(DEFAULT_TEAM_ID)

    col1, col2, col3, col4 = st.columns(4)

    # Extract metrics or use defaults.
    # Bug fix: safe_get's fallback is the keyword-only `default` parameter.
    # Passing it positionally (e.g. safe_get(summary, 'games_played', 41))
    # made 41 an extra lookup key, so the call always returned None and
    # `82 - games_played` below raised TypeError whenever data was present.
    if season_data and api.safe_get(season_data, 'summary'):
        summary = season_data['summary']
        games_played = api.safe_get(summary, 'games_played', default=41)
        wins = api.safe_get(summary, 'wins', default=24)
        losses = api.safe_get(summary, 'losses', default=17)
        avg_points = api.safe_get(summary, 'avg_points_scored', default=114.2)
    else:
        games_played, wins, losses, avg_points = 41, 24, 17, 114.2

    # Same positional-default fix: `[]` must be passed as default=, not a key.
    roster_size = len(api.safe_get(roster_data, 'players', default=[])) if roster_data else 15

    with col1:
        st.metric(
            "Season Record",
            f"{wins}-{losses}",
            delta=f"{(wins/(wins+losses))*100:.1f}% Win Rate" if wins + losses > 0 else None
        )

    with col2:
        st.metric("Points Per Game", f"{avg_points:.1f}", "Offense")

    with col3:
        st.metric("Active Roster", str(roster_size), "Players")

    with col4:
        # 82-game NBA regular season.
        games_remaining = 82 - games_played
        st.metric("Games Remaining", str(games_remaining), "Regular Season")
"opponent": "Charlotte Hornets", "location": "vs", "time": "7:30 PM"} + ] + + for i, game in enumerate(fallback_games, 1): + col1, col2, col3 = st.columns([2, 3, 1]) + + with col1: + st.write(f"**Game {i}**") + st.write(game['date']) + + with col2: + st.write(f"**{game['location']} {game['opponent']}**") + st.write(game['time']) + + with col3: + if st.button(f"Scout", key=f"scout_{i}", help=f"Scout {game['opponent']}"): + st.info("Scouting feature available in full version") + + +def render_player_highlights(): + """Render key player performance highlights.""" + st.subheader("🌟 Player Highlights") + + roster_data = load_team_roster(DEFAULT_TEAM_ID) + + col1, col2 = st.columns(2) + + with col1: + st.markdown("#### 🔥 Top Performers") + + if roster_data and api.safe_get(roster_data, 'players'): + # Sort players by average points + players = roster_data['players'] + top_performers = sorted( + [p for p in players if api.safe_get(p, 'avg_points', 0) > 0], + key=lambda x: api.safe_get(x, 'avg_points', 0), + reverse=True + )[:3] + + for player in top_performers: + name = f"{api.safe_get(player, 'first_name', 'Unknown')} {api.safe_get(player, 'last_name', 'Player')}" + position = api.safe_get(player, 'position', 'N/A') + ppg = api.safe_get(player, 'avg_points', 0) + apg = api.safe_get(player, 'avg_assists', 0) + rpg = api.safe_get(player, 'avg_rebounds', 0) + + st.markdown(f"**{name}** ({position})") + st.markdown(f" 📈 {ppg:.1f} PPG, {apg:.1f} APG, {rpg:.1f} RPG") + else: + # Fallback data + fallback_players = [ + {"name": "Kevin Durant", "pos": "SF", "ppg": 29.2, "apg": 6.8, "rpg": 6.7}, + {"name": "Kyrie Irving", "pos": "PG", "ppg": 27.1, "apg": 5.3, "rpg": 4.8}, + {"name": "Nic Claxton", "pos": "C", "ppg": 12.1, "apg": 2.1, "rpg": 8.7} + ] + + for player in fallback_players: + st.markdown(f"**{player['name']}** ({player['pos']})") + st.markdown(f" 📈 {player['ppg']:.1f} PPG, {player['apg']:.1f} APG, {player['rpg']:.1f} RPG") + + with col2: + st.markdown("#### 📋 Focus 
Areas") + + focus_areas = [ + {"area": "Improve transition defense", "priority": "High", "status": "In Progress"}, + {"area": "Fourth quarter execution", "priority": "High", "status": "Needs Work"}, + {"area": "Bench scoring consistency", "priority": "Medium", "status": "Monitoring"}, + {"area": "Defensive rebounding", "priority": "Medium", "status": "Improved"}, + ] + + for focus in focus_areas: + priority_color = "🔴" if focus["priority"] == "High" else "🟡" if focus["priority"] == "Medium" else "🟢" + st.markdown(f"{priority_color} **{focus['area']}**") + st.markdown(f" Status: {focus['status']}") + + +def render_coaching_notes(): + """Render coaching notes and reminders.""" + st.subheader("📝 Coaching Notes") + + # Initialize session state for notes + if 'coach_notes' not in st.session_state: + st.session_state.coach_notes = [ + { + 'date': (datetime.now() - timedelta(days=1)).strftime("%b %d"), + 'note': 'Team responded well to new offensive sets in practice' + }, + { + 'date': (datetime.now() - timedelta(days=2)).strftime("%b %d"), + 'note': 'Need to address defensive rebounding issues' + } + ] + + col1, col2 = st.columns([2, 1]) + + with col1: + # Display existing notes + st.markdown("#### Recent Notes") + + for note in st.session_state.coach_notes[-5:]: # Show last 5 notes + with st.expander(f"{note['date']} - {note['note'][:50]}..."): + st.write(note['note']) + + with col2: + # Add new note + st.markdown("#### Quick Note") + + with st.form("add_note_form"): + note_text = st.text_area( + "Add a coaching note:", + height=100, + placeholder="Enter observations, reminders, or insights..." 
+ ) + + if st.form_submit_button("💾 Save Note", use_container_width=True): + if note_text.strip(): + new_note = { + 'date': datetime.now().strftime("%b %d"), + 'note': note_text.strip() + } + st.session_state.coach_notes.append(new_note) + st.success("Note saved!") + st.rerun() + else: + st.error("Please enter a note before saving.") + + +def render_system_status(): + """Render system status and data freshness.""" + st.markdown("---") + + col1, col2, col3, col4 = st.columns(4) + + with col1: + # API status check + health_data = api.make_request("/api/health") + if health_data and api.safe_get(health_data, 'status') == 'healthy': + st.success("🟢 System Online") + else: + st.error("🔴 System Issues") + + with col2: + last_refresh = datetime.now().strftime('%I:%M %p') + st.info(f"🔄 Last Update: {last_refresh}") + + with col3: + st.info(f"👤 Coach: {COACH_NAME}") + + with col4: + st.info(f"🏀 Team ID: {DEFAULT_TEAM_ID}") + + +def main(): + """Main dashboard rendering function.""" + try: + # Render all dashboard sections + render_header() + + st.markdown("---") + render_priority_alerts() + + st.markdown("---") + render_navigation_tools() + + st.markdown("---") + render_team_snapshot() + + st.markdown("---") + + col1, col2 = st.columns([1, 1]) + + with col1: + render_upcoming_schedule() + + with col2: + render_player_highlights() + + st.markdown("---") + render_coaching_notes() + + render_system_status() + + except Exception as e: + st.error("An error occurred while loading the dashboard.") + logger.error(f"Dashboard error: {e}") + + if st.button("🔄 Retry"): + st.cache_data.clear() + st.rerun() + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/app/src/pages/31_Scouting_Reports.py b/app/src/pages/31_Scouting_Reports.py new file mode 100644 index 0000000000..7e18e7115d --- /dev/null +++ b/app/src/pages/31_Scouting_Reports.py @@ -0,0 +1,763 @@ +################################################## +# BallWatch Basketball Analytics - Opponent 
##################################################
# BallWatch Basketball Analytics - Opponent Scouting Reports
# Strategic analysis and game planning for coaches
#
# User Story: Marcus-3.1 - Opponent team analysis and scouting reports
#
# Features:
# - Comprehensive opponent analysis
# - Recent performance trends
# - Key player breakdowns
# - Strategic recommendations
# - Customizable game plans
##################################################

import logging
import streamlit as st
import requests
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from datetime import datetime, timedelta
from modules.nav import SideBarLinks, check_authentication

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Page configuration
st.set_page_config(
    page_title="BallWatch - Scouting Reports",
    layout="wide"
)

# Check authentication and role
check_authentication('head_coach')
SideBarLinks()

# Constants
BASE_URL = "http://api:4000"
DEFAULT_TEAM_ID = st.session_state.get('team_id', 1)
COACH_NAME = st.session_state.get('first_name', 'Coach')

# Styling
# NOTE(review): the original inline <style> block was lost during file
# extraction — restore the .metric-card / .player-card / item styles here.
st.markdown("""
""", unsafe_allow_html=True)


class ScoutingAPI:
    """API client for scouting data with comprehensive error handling."""

    @staticmethod
    def make_request(endpoint, params=None, timeout=10):
        """GET `BASE_URL + endpoint`; return parsed JSON or None on any failure."""
        try:
            url = f"{BASE_URL}{endpoint}"
            response = requests.get(url, params=params, timeout=timeout)

            if response.status_code == 200:
                return response.json()
            else:
                logger.warning(f"API request failed: {response.status_code}")
                return None

        except requests.exceptions.RequestException as e:
            logger.error(f"API request error: {e}")
            return None

    @staticmethod
    def safe_get(data, *keys, default=None):
        """Safely extract nested values, returning `default` on any miss."""
        try:
            for key in keys:
                data = data[key]
            return data
        except (KeyError, TypeError, AttributeError):
            return default


# Initialize API client
api = ScoutingAPI()


@st.cache_data(ttl=300, show_spinner=False)
def load_teams():
    """Load available teams for opponent selection (cached 5 min)."""
    teams_data = api.make_request("/api/teams")
    if teams_data and 'teams' in teams_data:
        return teams_data['teams']
    return None


@st.cache_data(ttl=180, show_spinner=False)
def load_upcoming_games(team_id):
    """Load upcoming games to suggest opponents (next 14 days, cached 3 min)."""
    params = {"team_id": team_id, "days": 14}
    return api.make_request("/api/games/upcoming", params=params)


@st.cache_data(ttl=300, show_spinner=False)
def load_opponent_report(team_id, opponent_id):
    """Load comprehensive opponent scouting report (last 10 games, cached 5 min)."""
    params = {
        "team_id": team_id,
        "opponent_id": opponent_id,
        "last_n_games": 10,
        "include_players": "true",
        "include_trends": "true"
    }
    return api.make_request("/api/analytics/opponent-reports", params=params)


def render_page_header():
    """Render the page header with title, coach info, and a refresh control."""
    col1, col2, col3 = st.columns([2, 1, 1])

    with col1:
        st.title("🎯 Opponent Scouting Reports")
        st.markdown("*Comprehensive strategic analysis for game planning*")

    with col2:
        st.markdown(f"**Coach:** {COACH_NAME}")
        st.markdown(f"**Team:** Brooklyn Nets")

    with col3:
        if st.button("🔄 Refresh Data", help="Refresh scouting data"):
            st.cache_data.clear()
            st.rerun()


def render_opponent_selector():
    """Render opponent selection UI.

    Returns a `(opponent_name, opponent_data)` tuple; `(None, None)` when
    nothing has been selected. NOTE(review): an early return from inside
    `col1` skips rendering the `col2` quick actions — confirm intended.
    """
    st.subheader("🏀 Select Opponent")

    col1, col2 = st.columns([2, 1])

    with col1:
        # Prefer suggesting opponents from the upcoming schedule.
        upcoming_games = load_upcoming_games(DEFAULT_TEAM_ID)

        if upcoming_games and api.safe_get(upcoming_games, 'upcoming_games'):
            st.markdown("#### 📅 Upcoming Opponents")

            games = upcoming_games['upcoming_games'][:5]
            upcoming_opponents = {}

            for game in games:
                home_team = api.safe_get(game, 'home_team_name')
                away_team = api.safe_get(game, 'away_team_name')
                game_date = api.safe_get(game, 'game_date')

                # Determine opponent relative to the coach's team.
                if home_team == "Brooklyn Nets":
                    opponent = away_team
                    location = "vs"
                else:
                    opponent = home_team
                    location = "@"

                if opponent:
                    display_name = f"{location} {opponent} ({game_date})"
                    upcoming_opponents[display_name] = {
                        'name': opponent,
                        'date': game_date,
                        'location': location
                    }

            if upcoming_opponents:
                selected_upcoming = st.selectbox(
                    "Choose from upcoming games:",
                    options=["Select upcoming opponent..."] + list(upcoming_opponents.keys()),
                    key="upcoming_opponent"
                )

                if selected_upcoming != "Select upcoming opponent...":
                    return upcoming_opponents[selected_upcoming]['name'], selected_upcoming

        # All teams selector
        st.markdown("#### 🏀 All NBA Teams")
        teams = load_teams()

        if teams:
            # Filter out the user's own team from the opponent list.
            available_teams = [team for team in teams if team.get('team_id') != DEFAULT_TEAM_ID]
            team_options = {team['name']: team for team in available_teams}

            selected_team = st.selectbox(
                "Or select any NBA team:",
                options=["Select any team..."] + list(team_options.keys()),
                key="any_team"
            )

            if selected_team != "Select any team...":
                return team_options[selected_team]['name'], team_options[selected_team]
        else:
            # Fallback options when the teams API is unavailable.
            fallback_teams = {
                "Boston Celtics": {"team_id": 2, "name": "Boston Celtics"},
                "Miami Heat": {"team_id": 3, "name": "Miami Heat"},
                "Philadelphia 76ers": {"team_id": 4, "name": "Philadelphia 76ers"},
                "Milwaukee Bucks": {"team_id": 5, "name": "Milwaukee Bucks"}
            }

            selected_fallback = st.selectbox(
                "Select opponent (fallback):",
                options=list(fallback_teams.keys()),
                key="fallback_team"
            )

            if selected_fallback:
                return selected_fallback, fallback_teams[selected_fallback]

    with col2:
        st.markdown("#### ⚡ Quick Actions")

        if st.button("📋 Recent Scouts", help="View recently scouted teams"):
            st.info("Recent scouting reports feature coming soon!")

        if st.button("⭐ Favorites", help="View favorite opponents to scout"):
            st.info("Favorite opponents feature coming soon!")

    return None, None


def render_opponent_overview(opponent_name, opponent_data):
    """Render high-level opponent overview metric cards.

    NOTE(review): the HTML markup below was reconstructed — the original
    <div> tags were lost in extraction; verify class names against the CSS.
    """
    st.subheader(f"📊 {opponent_name} - Team Overview")

    # Load opponent report; default opponent_id=2 for non-dict fallback data.
    opponent_id = opponent_data.get('team_id') if isinstance(opponent_data, dict) else 2
    report = load_opponent_report(DEFAULT_TEAM_ID, opponent_id)

    col1, col2, col3, col4 = st.columns(4)

    if report and api.safe_get(report, 'recent_performance'):
        perf = report['recent_performance']
        record = api.safe_get(perf, 'record', '0-0')
        win_pct = api.safe_get(perf, 'win_percentage', 0)
        ppg = api.safe_get(perf, 'avg_points_scored', 0)
        opp_ppg = api.safe_get(perf, 'avg_points_allowed', 0)
    else:
        # Fallback demo numbers keyed off the opponent name.
        if "Celtics" in opponent_name:
            record, win_pct, ppg, opp_ppg = "32-9", 78.0, 118.5, 110.2
        elif "Heat" in opponent_name:
            record, win_pct, ppg, opp_ppg = "28-13", 68.3, 112.8, 108.5
        else:
            record, win_pct, ppg, opp_ppg = "25-16", 61.0, 114.2, 111.8

    with col1:
        st.markdown(f"""
        <div class="metric-card">
            <div class="metric-label">Record</div>
            <div class="metric-value">{record}</div>
            <div class="metric-delta">{win_pct:.1f}% Win Rate</div>
        </div>
        """, unsafe_allow_html=True)

    with col2:
        st.markdown(f"""
        <div class="metric-card">
            <div class="metric-label">Offense</div>
            <div class="metric-value">{ppg:.1f}</div>
            <div class="metric-delta">Points Per Game</div>
        </div>
        """, unsafe_allow_html=True)

    with col3:
        net_rating = ppg - opp_ppg
        st.markdown(f"""
        <div class="metric-card">
            <div class="metric-label">Defense</div>
            <div class="metric-value">{opp_ppg:.1f}</div>
            <div class="metric-delta">Opp Points Per Game</div>
        </div>
        """, unsafe_allow_html=True)

    with col4:
        st.markdown(f"""
        <div class="metric-card">
            <div class="metric-label">Net Rating</div>
            <div class="metric-value">{net_rating:+.1f}</div>
            <div class="metric-delta">Point Differential</div>
        </div>
        """, unsafe_allow_html=True)


def render_key_players(opponent_name, report):
    """Render key players analysis cards for the opponent.

    NOTE(review): the card HTML was reconstructed after extraction loss.
    """
    st.subheader(f"⭐ {opponent_name} - Key Players")

    key_players = api.safe_get(report, 'key_players', []) if report else []

    if not key_players:
        # Generate fallback player data based on opponent.
        if "Celtics" in opponent_name:
            key_players = [
                {"first_name": "Jayson", "last_name": "Tatum", "position": "SF", "avg_points": 30.1, "avg_rebounds": 8.8, "avg_assists": 4.9},
                {"first_name": "Jaylen", "last_name": "Brown", "position": "SG", "avg_points": 27.2, "avg_rebounds": 7.0, "avg_assists": 3.5},
                {"first_name": "Kristaps", "last_name": "Porzingis", "position": "C", "avg_points": 20.1, "avg_rebounds": 7.2, "avg_assists": 2.0}
            ]
        elif "Heat" in opponent_name:
            key_players = [
                {"first_name": "Jimmy", "last_name": "Butler", "position": "SF", "avg_points": 22.5, "avg_rebounds": 5.3, "avg_assists": 5.0},
                {"first_name": "Bam", "last_name": "Adebayo", "position": "C", "avg_points": 19.3, "avg_rebounds": 10.4, "avg_assists": 3.9},
                {"first_name": "Tyler", "last_name": "Herro", "position": "SG", "avg_points": 20.8, "avg_rebounds": 5.3, "avg_assists": 4.5}
            ]
        else:
            key_players = [
                {"first_name": "Star", "last_name": "Player", "position": "SF", "avg_points": 25.0, "avg_rebounds": 6.5, "avg_assists": 5.2},
                {"first_name": "Second", "last_name": "Option", "position": "PG", "avg_points": 18.7, "avg_rebounds": 4.1, "avg_assists": 7.8},
                {"first_name": "Role", "last_name": "Player", "position": "C", "avg_points": 12.3, "avg_rebounds": 8.9, "avg_assists": 2.1}
            ]

    # Display top 3 players in columns
    if len(key_players) >= 3:
        columns = st.columns(3)

        for i, player in enumerate(key_players[:3]):
            with columns[i]:
                name = f"{player.get('first_name', 'Unknown')} {player.get('last_name', 'Player')}"
                position = player.get('position', 'N/A')
                ppg = player.get('avg_points', 0)
                rpg = player.get('avg_rebounds', 0)
                apg = player.get('avg_assists', 0)

                st.markdown(f"""
                <div class="player-card">
                    <div class="player-name">{name}</div>
                    <div class="player-position">{position}</div>
                    <div class="player-stat">{ppg:.1f} PPG</div>
                    <div class="player-stat">{rpg:.1f} RPG</div>
                    <div class="player-stat">{apg:.1f} APG</div>
                </div>
                """, unsafe_allow_html=True)


def render_strengths_weaknesses(opponent_name):
    """Render team strengths and weaknesses analysis.

    NOTE(review): item HTML reconstructed after extraction loss.
    """
    st.subheader(f"🔍 {opponent_name} - Strengths & Weaknesses")

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("#### 💪 Team Strengths")

        # Generate strengths based on opponent
        if "Celtics" in opponent_name:
            strengths = [
                "Elite three-point shooting (38.8%)",
                "Strong defensive versatility",
                "Excellent ball movement (27.2 APG)",
                "Clutch time execution",
                "Deep bench rotation"
            ]
        elif "Heat" in opponent_name:
            strengths = [
                "Physical, grind-it-out style",
                "Strong corner three shooting",
                "Excellent coaching adjustments",
                "Defensive rebounding (47.8 RPG)",
                "Culture and mental toughness"
            ]
        else:
            strengths = [
                "Fast-paced offense",
                "Strong transition game",
                "Good team chemistry",
                "Effective pick-and-roll",
                "Solid home court advantage"
            ]

        for strength in strengths:
            st.markdown(f"""
            <div class="strength-item">✅ {strength}</div>
            """, unsafe_allow_html=True)
            st.write("")  # Add spacing

    with col2:
        st.markdown("#### ⚠️ Team Weaknesses")

        # Generate weaknesses based on opponent
        if "Celtics" in opponent_name:
            weaknesses = [
                "Can be vulnerable to switches",
                "Over-reliance on three-point shots",
                "Size disadvantage in post",
                "Turnover prone at times",
                "Can struggle against zones"
            ]
        elif "Heat" in opponent_name:
            weaknesses = [
                "Inconsistent offensive output",
                "Limited bench scoring",
                "Can be beaten by pace",
                "Three-point shooting streaky",
                "Age-related durability concerns"
            ]
        else:
            weaknesses = [
                "Inconsistent defense",
                "Lack of clutch scoring",
                "Poor road record",
                "Bench depth issues",
                "Vulnerable to physical play"
            ]

        for weakness in weaknesses:
            st.markdown(f"""
            <div class="weakness-item">❌ {weakness}</div>
            """, unsafe_allow_html=True)
            st.write("")  # Add spacing


def render_strategic_recommendations(opponent_name):
    """Render strategic game plan recommendations.

    NOTE(review): item HTML reconstructed after extraction loss.
    """
    st.subheader(f"💡 Strategic Game Plan vs {opponent_name}")

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("#### ⚔️ Offensive Strategy")

        # Generate offensive strategies based on opponent
        if "Celtics" in opponent_name:
            off_strategies = [
                "Attack switches with KD post-ups",
                "Run early offense before defense sets",
                "Use Simmons as screener for mismatches",
                "Target corner threes vs zone looks",
                "Push pace when possible"
            ]
        elif "Heat" in opponent_name:
            off_strategies = [
                "Move ball quickly vs pressure",
                "Attack Butler in pick-and-roll",
                "Use pace to wear down defense",
                "Create open threes via penetration",
                "Exploit size advantage inside"
            ]
        else:
            off_strategies = [
                "Push transition opportunities",
                "Target weaker rim protection",
                "Create threes through ball movement",
                "Use size advantage in post",
                "Attack their bench unit"
            ]

        for i, strategy in enumerate(off_strategies, 1):
            st.markdown(f"""
            <div class="strategy-item">{i}. {strategy}</div>
            """, unsafe_allow_html=True)
            st.write("")  # Add spacing

    with col2:
        st.markdown("#### 🛡️ Defensive Adjustments")

        # Generate defensive strategies based on opponent
        if "Celtics" in opponent_name:
            def_strategies = [
                "Limit transition opportunities",
                "Contest all three-point attempts",
                "Force Tatum into tough iso shots",
                "Switch 1-4 to match their switching",
                "Protect the paint vs drives"
            ]
        elif "Heat" in opponent_name:
            def_strategies = [
                "Control pace and tempo",
                "Limit Butler drives to rim",
                "Challenge their shooters",
                "Secure defensive rebounds",
                "Stay disciplined vs their physicality"
            ]
        else:
            def_strategies = [
                "Protect the paint",
                "Close out hard on shooters",
                "Limit fast break opportunities",
                "Force them into half-court sets",
                "Stay aggressive on defense"
            ]

        for i, strategy in enumerate(def_strategies, 1):
            st.markdown(f"""
            <div class="strategy-item">{i}. {strategy}</div>
            """, unsafe_allow_html=True)
            st.write("")  # Add spacing


def render_key_matchups(opponent_name):
    """Render key individual matchups to monitor."""
    st.subheader("👥 Key Individual Matchups")

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("#### 🟢 Favorable Matchups")

        favorable_matchups = [
            {
                "player": "Kevin Durant",
                "matchup": "Their SF",
                "advantage": "Size and skill advantage"
            },
            {
                "player": "Kyrie Irving",
                "matchup": "Their PG",
                "advantage": "Speed and ball-handling"
            },
            {
                "player": "Nic Claxton",
                "matchup": "Their C",
                "advantage": "Athleticism and rim protection"
            }
        ]

        for matchup in favorable_matchups:
            st.success(f"✅ **{matchup['player']}** vs {matchup['matchup']}")
            st.caption(matchup['advantage'])

    with col2:
        st.markdown("#### 🔴 Challenging Matchups")

        if "Celtics" in opponent_name:
            challenging_matchups = [
                {
                    "player": "Ben Simmons",
                    "matchup": "Jayson Tatum",
                    "challenge": "Must stay disciplined defensively"
                },
                {
                    "player": "Bench Unit",
                    "matchup": "Their depth",
                    "challenge": "Need to compete with energy"
                },
                {
                    "player": "Team Rebounding",
                    "matchup": "Their size",
                    "challenge": "They're strong on the glass"
                }
            ]
        else:
            challenging_matchups = [
                {
                    "player": "Role Players",
                    "matchup": "Their stars",
                    "challenge": "Need to limit star impact"
                },
                {
                    "player": "Bench Production",
                    "matchup": "Their depth",
                    "challenge": "Must match their energy"
                },
                {
                    "player": "Fourth Quarter",
                    "matchup": "Clutch time",
                    "challenge": "Execute down the stretch"
                }
            ]

        for matchup in challenging_matchups:
            st.warning(f"⚠️ **{matchup['player']}** vs {matchup['matchup']}")
            st.caption(matchup['challenge'])


def render_save_gameplan():
    """Render interface to save and customize game plans."""
    st.subheader("💾 Save Custom Game Plan")

    with st.expander("📋 Create Detailed Game Plan", expanded=False):
        col1, col2 = st.columns(2)

        with col1:
            st.markdown("#### Offensive Game Plan")
            offensive_notes = st.text_area(
                "Key offensive strategies and adjustments:",
                height=120,
                placeholder="Enter specific plays, sets, and adjustments to run against this opponent..."
            )

            timeout_plays = st.text_area(
                "Timeout and special situation plays:",
                height=80,
                placeholder="ATO plays, end-of-game scenarios, special sets..."
            )

        with col2:
            st.markdown("#### Defensive Game Plan")
            defensive_notes = st.text_area(
                "Key defensive strategies and coverages:",
                height=120,
                placeholder="Enter defensive adjustments, coverages, and focus areas..."
            )

            personnel_notes = st.text_area(
                "Personnel and rotation notes:",
                height=80,
                placeholder="Specific rotation adjustments, matchup considerations..."
            )

        # Save options
        st.markdown("#### Save Options")
        col1, col2, col3 = st.columns(3)

        with col1:
            plan_name = st.text_input(
                "Game Plan Name:",
                placeholder="e.g., vs Celtics - Jan 15"
            )

        with col2:
            priority = st.selectbox(
                "Priority Level:",
                options=["High", "Medium", "Low"]
            )

        with col3:
            share_with = st.multiselect(
                "Share with:",
                options=["Assistant Coaches", "Analytics Team", "Players"],
                default=["Assistant Coaches"]
            )

        # Save button
        col1, col2, col3 = st.columns([1, 1, 1])

        with col2:
            if st.button("💾 Save Game Plan", type="primary", use_container_width=True):
                if plan_name and (offensive_notes or defensive_notes):
                    # In a real application, this would save to the database
                    st.success("✅ Game plan saved successfully!")
                    st.balloons()

                    # Show saved plan summary
                    with st.container():
                        st.markdown("#### Saved Game Plan Summary")
                        st.info(f"**Name:** {plan_name}")
                        st.info(f"**Priority:** {priority}")
                        st.info(f"**Shared with:** {', '.join(share_with)}")
                else:
                    st.error("Please provide a plan name and at least one strategy note.")


def main():
    """Main application function: selector, report sections, retry on error."""
    try:
        render_page_header()
        st.markdown("---")

        # Opponent selection
        opponent_name, opponent_data = render_opponent_selector()

        if opponent_name and opponent_data:
            st.success(f"🎯 Scouting Report Generated for **{opponent_name}**")
            st.markdown("---")

            # Load comprehensive report (opponent_id=2 fallback for non-dict data).
            opponent_id = opponent_data.get('team_id') if isinstance(opponent_data, dict) else 2
            report = load_opponent_report(DEFAULT_TEAM_ID, opponent_id)

            # Render all analysis sections
            render_opponent_overview(opponent_name, opponent_data)

            st.markdown("---")
            render_key_players(opponent_name, report)

            st.markdown("---")
            render_strengths_weaknesses(opponent_name)

            st.markdown("---")
            render_strategic_recommendations(opponent_name)

            st.markdown("---")
            render_key_matchups(opponent_name)

            st.markdown("---")
            render_save_gameplan()

        else:
            st.info("👆 Please select an opponent to generate a scouting report")

            # Show quick tips while waiting
            with st.container():
                st.markdown("### 💡 Scouting Report Features")

                col1, col2, col3 = st.columns(3)

                with col1:
                    st.markdown("""
                    #### 🎯 Strategic Analysis
                    - Comprehensive team overview
                    - Recent performance trends
                    - Key statistical insights
                    """)

                with col2:
                    st.markdown("""
                    #### 👥 Player Breakdowns
                    - Star player analysis
                    - Role player identification
                    - Matchup advantages
                    """)

                with col3:
                    st.markdown("""
                    #### 💾 Game Planning
                    - Customizable strategies
                    - Save and share plans
                    - Historical comparisons
                    """)

        # Footer
        st.markdown("---")
        st.caption("*Scouting reports are updated with the latest available data*")

    except Exception as e:
        # Top-level UI boundary: report, log, and offer a cache-clearing retry.
        st.error("An error occurred while loading the scouting report.")
        logger.error(f"Scouting report error: {e}")

        if st.button("🔄 Retry"):
            st.cache_data.clear()
            st.rerun()


if __name__ == "__main__":
    main()
##################################################
# BallWatch Basketball Analytics - Lineup Effectiveness Analysis
# Optimize player combinations and rotations for coaches
#
# User Story: Marcus-3.4 - Lineup effectiveness and rotation optimization
#
# Features:
# - Lineup performance analysis with advanced metrics
# - Player combination effectiveness
# - Rotation optimization recommendations
# - Interactive lineup builder and scenario testing
##################################################

import logging
import streamlit as st
import requests
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from datetime import datetime, timedelta
import numpy as np
from modules.nav import SideBarLinks, check_authentication

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Page configuration
st.set_page_config(
    page_title="BallWatch - Lineup Analysis",
    layout="wide"
)

# Check authentication and role
check_authentication('head_coach')
SideBarLinks()

# Constants
BASE_URL = "http://api:4000"
DEFAULT_TEAM_ID = st.session_state.get('team_id', 1)
COACH_NAME = st.session_state.get('first_name', 'Coach')

# Styling
# NOTE(review): the original inline <style> block was lost during file
# extraction — restore the page CSS here.
st.markdown("""
""", unsafe_allow_html=True)


class LineupAnalysisAPI:
    """API client for lineup analysis with comprehensive error handling."""

    @staticmethod
    def make_request(endpoint, params=None, timeout=15):
        """GET `BASE_URL + endpoint`; return parsed JSON or None on any failure."""
        try:
            url = f"{BASE_URL}{endpoint}"
            response = requests.get(url, params=params, timeout=timeout)

            if response.status_code == 200:
                return response.json()
            else:
                logger.warning(f"API request failed: {response.status_code}")
                return None

        except requests.exceptions.RequestException as e:
            logger.error(f"API request error: {e}")
            return None

    @staticmethod
    def safe_get(data, *keys, default=None):
        """Safely extract nested values, returning `default` on any miss."""
        try:
            for key in keys:
                data = data[key]
            return data
        except (KeyError, TypeError, AttributeError):
            return default


# Initialize API client
api = LineupAnalysisAPI()


@st.cache_data(ttl=300, show_spinner=False)
def load_teams():
    """Load available teams for analysis (cached 5 min)."""
    teams_data = api.make_request("/api/teams")
    if teams_data and 'teams' in teams_data:
        return teams_data['teams']
    return None


@st.cache_data(ttl=300, show_spinner=False)
def load_lineup_analysis(team_id, min_games=5, season=None, sort_by='plus_minus'):
    """Load lineup effectiveness data for a team (cached 5 min)."""
    params = {
        "team_id": team_id,
        "min_games": min_games,
        "sort_by": sort_by
    }
    if season:
        params["season"] = season

    return api.make_request("/api/analytics/lineup-configurations", params=params)


@st.cache_data(ttl=600, show_spinner=False)
def load_team_roster(team_id):
    """Load team roster with stats for lineup building (cached 10 min)."""
    params = {"include_stats": "true"}
    return api.make_request(f"/api/teams/{team_id}/players", params=params)


def generate_sample_lineup_data():
    """Generate realistic sample lineup data for demonstration.

    Returns a DataFrame of randomized lineup metrics; values are
    intentionally non-deterministic demo data.
    """
    import random

    lineup_combinations = [
        "K. Irving, M. Bridges, K. Durant, N. Claxton, B. Simmons",
        "K. Durant, K. Irving, C. Johnson, M. Bridges, N. Claxton",
        "B. Simmons, K. Irving, K. Durant, C. Johnson, N. Claxton",
        "C. Johnson, M. Bridges, K. Durant, D. Finney-Smith, N. Claxton",
        "K. Irving, M. Bridges, K. Durant, B. Simmons, D. Sharpe",
        "B. Simmons, C. Johnson, M. Bridges, K. Durant, D. Finney-Smith",
        "K. Irving, K. Durant, M. Bridges, N. Claxton, C. Johnson",
        "D. Thomas, M. Bridges, K. Durant, N. Claxton, B. Simmons",
        "K. Irving, C. Johnson, K. Durant, B. Simmons, N. Claxton"
    ]

    data = []
    for i, lineup in enumerate(lineup_combinations):
        plus_minus = random.uniform(-8.5, 12.3)
        off_rating = random.uniform(108.2, 118.7)
        def_rating = random.uniform(105.1, 115.8)
        games_played = random.randint(8, 35)
        total_minutes = random.uniform(120, 450)

        data.append({
            'lineup_id': i + 1,
            'lineup_players': lineup,
            'avg_plus_minus': round(plus_minus, 1),
            'avg_offensive_rating': round(off_rating, 1),
            'avg_defensive_rating': round(def_rating, 1),
            'games_played': games_played,
            'total_minutes': round(total_minutes, 1),
            'quarters_played': random.randint(15, 60),
            'avg_fg_pct': round(random.uniform(0.42, 0.52), 3),
            'avg_points_per_game': round(random.uniform(22, 32), 1)
        })

    return pd.DataFrame(data)


def render_page_header():
    """Render the page header with title, coach info, and a refresh control."""
    col1, col2, col3 = st.columns([2, 1, 1])

    with col1:
        st.title("📊 Lineup Effectiveness Analysis")
        st.markdown("*Optimize player combinations and rotations*")

    with col2:
        st.markdown(f"**Coach:** {COACH_NAME}")
        st.markdown("**Focus:** Rotation Strategy")

    with col3:
        if st.button("🔄 Refresh Analysis", help="Refresh lineup data"):
            st.cache_data.clear()
            st.rerun()


def render_analysis_filters():
    """Render filter controls.

    Returns `(team_id, team_name, min_games, season_or_None, sort_by_key,
    sort_by_display)`.
    """
    st.subheader("🔧 Analysis Parameters")

    col1, col2, col3, col4 = st.columns(4)

    with col1:
        # Team selection
        teams = load_teams()

        if teams:
            team_options = {team['name']: team['team_id'] for team in teams}

            # Default the selector to the coach's own team when present.
            default_index = 0
            if DEFAULT_TEAM_ID in team_options.values():
                default_index = list(team_options.values()).index(DEFAULT_TEAM_ID)

            selected_team_name = st.selectbox(
                "Select Team:",
                options=list(team_options.keys()),
                index=default_index,
                help="Choose the team to analyze"
            )
            team_id = team_options[selected_team_name]
        else:
            # Fallback team selection when the API is unavailable.
            fallback_teams = {
                "Brooklyn Nets": 1,
                "Boston Celtics": 2,
                "Miami Heat": 3,
                "Philadelphia 76ers": 4
            }
            selected_team_name = st.selectbox(
                "Select Team:",
                options=list(fallback_teams.keys()),
                help="Choose the team to analyze"
            )
            team_id = fallback_teams[selected_team_name]

    with col2:
        min_games = st.number_input(
            "Min Games Together:",
            min_value=1,
            max_value=50,
            value=5,
            step=1,
            help="Minimum games played together to include lineup"
        )

    with col3:
        season = st.selectbox(
            "Season:",
            options=["All Seasons", "2024-25", "2023-24", "2022-23"],
            index=1,
            help="Season to analyze"
        )
        season_param = None if season == "All Seasons" else season

    with col4:
        sort_options = {
            "Plus/Minus": "plus_minus",
            "Offensive Rating": "offensive_rating",
            "Defensive Rating": "defensive_rating",
            "Games Played": "games_played",
            "Total Minutes": "minutes_played"
        }
        sort_by_display = st.selectbox(
            "Sort By:",
            options=list(sort_options.keys()),
            help="Primary sorting metric"
        )
        sort_by = sort_options[sort_by_display]

    return team_id, selected_team_name, min_games, season_param, sort_by, sort_by_display


def format_lineup_display(lineup_str):
    """Format a comma-separated lineup string as 'F. Last | F. Last | ...'.

    Names already in short form ('K. Durant') pass through; full names are
    abbreviated to first initial + last word. Limits output to 5 players.
    """
    if not lineup_str:
        return "Unknown Lineup"

    players = lineup_str.split(', ')
    formatted_players = []

    for player in players[:5]:  # Limit to 5 players
        player = player.strip()
        # Heuristic: '. ' plus short length means already abbreviated.
        if '. ' in player and len(player) < 15:
            formatted_players.append(player)
        else:
            parts = player.split(' ')
            if len(parts) >= 2:
                formatted_players.append(f"{parts[0][0]}. {parts[-1]}")
            else:
                formatted_players.append(player)

    return ' | '.join(formatted_players)
{parts[-1]}") + else: + formatted_players.append(player) + + return ' | '.join(formatted_players) + + +def render_lineup_effectiveness_table(df, sort_by_display): + """Render the main lineup effectiveness results table.""" + st.subheader("🏀 Lineup Performance Results") + + if df.empty: + st.warning("⚠️ No lineup data available with current filters.") + st.info("Try adjusting your filters or check if the team has sufficient game data.") + return + + # Sort data by the selected metric + if sort_by_display == "Defensive Rating": + df_sorted = df.sort_values('avg_defensive_rating', ascending=True) # Lower is better + elif sort_by_display == "Plus/Minus": + df_sorted = df.sort_values('avg_plus_minus', ascending=False) + elif sort_by_display == "Offensive Rating": + df_sorted = df.sort_values('avg_offensive_rating', ascending=False) + elif sort_by_display == "Games Played": + df_sorted = df.sort_values('games_played', ascending=False) + else: # Total Minutes + df_sorted = df.sort_values('total_minutes', ascending=False) + + # Create enhanced display DataFrame + display_df = pd.DataFrame() + + # Format lineup names + display_df['Lineup'] = df_sorted['lineup_players'].apply(format_lineup_display) + + # Format numeric columns with appropriate styling + display_df['Games'] = df_sorted['games_played'].astype(str) + display_df['Minutes'] = df_sorted['total_minutes'].apply(lambda x: f"{x:.0f}") + display_df['+/- Rating'] = df_sorted['avg_plus_minus'].apply(lambda x: f"{x:+.1f}") + display_df['Off Rating'] = df_sorted['avg_offensive_rating'].apply(lambda x: f"{x:.1f}") + display_df['Def Rating'] = df_sorted['avg_defensive_rating'].apply(lambda x: f"{x:.1f}") + + if 'avg_fg_pct' in df_sorted.columns: + display_df['FG%'] = df_sorted['avg_fg_pct'].apply(lambda x: f"{x:.1%}") + + if 'avg_points_per_game' in df_sorted.columns: + display_df['PPG'] = df_sorted['avg_points_per_game'].apply(lambda x: f"{x:.1f}") + + # Display with color coding + st.dataframe( + display_df, + 
use_container_width=True, + hide_index=True + ) + + # Add insights below the table + best_lineup = df_sorted.iloc[0] + worst_lineup = df_sorted.iloc[-1] + + col1, col2, col3 = st.columns(3) + + with col1: + st.markdown(f""" +
+
🏆 Best Performing Lineup
+

{format_lineup_display(best_lineup['lineup_players'])}

+

+/- Rating: {best_lineup['avg_plus_minus']:+.1f}

+

Games: {best_lineup['games_played']}

+
+ """, unsafe_allow_html=True) + + with col2: + # Find most used lineup + most_used = df_sorted.loc[df_sorted['total_minutes'].idxmax()] + st.markdown(f""" +
+
⏱️ Most Used Lineup
+

{format_lineup_display(most_used['lineup_players'])}

+

Minutes: {most_used['total_minutes']:.0f}

+

+/- Rating: {most_used['avg_plus_minus']:+.1f}

+
+ """, unsafe_allow_html=True) + + with col3: + # Calculate efficiency + avg_plus_minus = df_sorted['avg_plus_minus'].mean() + efficient_lineups = len(df_sorted[df_sorted['avg_plus_minus'] > avg_plus_minus]) + + efficiency_color = "positive-metric" if efficient_lineups > len(df_sorted) / 2 else "negative-metric" + + st.markdown(f""" +
+
📈 Lineup Efficiency
+

{efficient_lineups}/{len(df_sorted)} lineups above average

+

Avg +/-: {avg_plus_minus:+.1f}

+

Efficiency: {(efficient_lineups/len(df_sorted)*100):.0f}%

+
def render_performance_visualizations(df):
    """Render the +/- bar chart and off/def efficiency scatter.

    Args:
        df: Lineup-effectiveness DataFrame; only the first 12 rows are
            plotted to keep the charts readable.
    """
    st.subheader("📈 Performance Visualizations")

    if df.empty:
        st.info("No data available for visualization.")
        return

    # Limit to top 12 lineups for readability; copy so the helper column
    # added below never touches the caller's DataFrame.
    df_viz = df.head(12).copy()
    df_viz['lineup_short'] = df_viz['lineup_players'].apply(format_lineup_display)

    col1, col2 = st.columns(2)

    with col1:
        # Plus/Minus horizontal bar chart, diverging color scale around 0.
        fig_plus_minus = px.bar(
            df_viz,
            x='avg_plus_minus',
            y='lineup_short',
            orientation='h',
            title='Plus/Minus Rating by Lineup',
            labels={
                'avg_plus_minus': 'Plus/Minus Rating',
                'lineup_short': 'Lineup'
            },
            color='avg_plus_minus',
            color_continuous_scale=['red', 'white', 'green'],
            color_continuous_midpoint=0
        )

        fig_plus_minus.update_layout(
            height=500,
            showlegend=False,
            yaxis={'categoryorder': 'total ascending'}
        )

        # NOTE(review): the source text for this hover template was mangled;
        # it appears to separate lineup name and rating with an HTML line
        # break — confirm the exact markup against the deployed page.
        fig_plus_minus.update_traces(
            hovertemplate="%{y}<br>+/- Rating: %{x:+.1f}"
        )

        st.plotly_chart(fig_plus_minus, use_container_width=True)

    with col2:
        # Offensive vs Defensive Rating scatter plot, sized by minutes.
        fig_efficiency = px.scatter(
            df_viz,
            x='avg_defensive_rating',
            y='avg_offensive_rating',
            size='total_minutes',
            color='avg_plus_minus',
            hover_name='lineup_short',
            title='Offensive vs Defensive Efficiency',
            labels={
                'avg_defensive_rating': 'Defensive Rating (lower is better)',
                'avg_offensive_rating': 'Offensive Rating (higher is better)',
                'total_minutes': 'Total Minutes',
                'avg_plus_minus': '+/- Rating'
            },
            color_continuous_scale=['red', 'white', 'green'],
            color_continuous_midpoint=0
        )

        # Quadrant lines. NOTE(review): despite the labels these are medians
        # of the 12 displayed lineups, not true league-wide averages.
        league_avg_off = df_viz['avg_offensive_rating'].median()
        league_avg_def = df_viz['avg_defensive_rating'].median()

        fig_efficiency.add_hline(
            y=league_avg_off,
            line_dash="dash",
            line_color="gray",
            opacity=0.7,
            annotation_text="League Avg Offense"
        )
        fig_efficiency.add_vline(
            x=league_avg_def,
            line_dash="dash",
            line_color="gray",
            opacity=0.7,
            annotation_text="League Avg Defense"
        )

        fig_efficiency.update_layout(height=500)

        st.plotly_chart(fig_efficiency, use_container_width=True)


def render_insights_and_recommendations(df, team_name):
    """Render key findings and coaching recommendations.

    Args:
        df: Lineup-effectiveness DataFrame (one row per lineup) with
            'avg_plus_minus', 'total_minutes', 'avg_offensive_rating' and
            'avg_defensive_rating' columns.
        team_name: Team display name (kept for interface stability; not
            currently used in the rendered output).
    """
    st.subheader("💡 Coaching Insights & Recommendations")

    if df.empty:
        st.info("No lineup data available for analysis.")
        return

    # FIX: work on a copy so the rank/tier helper columns added below do not
    # leak into the caller's DataFrame (it is reused by later page sections).
    df = df.copy()

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("#### 🔥 Key Findings")

        positive_lineups = len(df[df['avg_plus_minus'] > 0])
        total_lineups = len(df)
        best_plus_minus = df['avg_plus_minus'].max()
        worst_plus_minus = df['avg_plus_minus'].min()

        # Average +/- of the three most-used lineups (dense rank on minutes).
        df['usage_rank'] = df['total_minutes'].rank(method='dense', ascending=False)
        high_usage_lineups = df[df['usage_rank'] <= 3]
        high_usage_avg_plus_minus = high_usage_lineups['avg_plus_minus'].mean()

        insights = []

        # Depth: share of lineups with a positive point differential.
        if positive_lineups / total_lineups > 0.6:
            insights.append(("positive", f"✅ Strong lineup depth: {positive_lineups}/{total_lineups} lineups are positive"))
        else:
            insights.append(("negative", f"⚠️ Lineup concerns: Only {positive_lineups}/{total_lineups} lineups are positive"))

        # Usage vs performance of the main rotation.
        if high_usage_avg_plus_minus > 3:
            insights.append(("positive", f"✅ Top rotations are effective (+{high_usage_avg_plus_minus:.1f} avg)"))
        elif high_usage_avg_plus_minus < 0:
            insights.append(("negative", f"❌ Need rotation changes: Top lineups averaging {high_usage_avg_plus_minus:+.1f}"))
        else:
            insights.append(("neutral", f"📊 Mixed results from top rotations ({high_usage_avg_plus_minus:+.1f} avg)"))

        # Spread between best and worst lineups.
        performance_range = best_plus_minus - worst_plus_minus
        if performance_range > 15:
            insights.append(("neutral", f"📊 Wide performance gap: {performance_range:.1f} point spread"))

        for insight_type, insight_text in insights:
            if insight_type == "positive":
                st.success(insight_text)
            elif insight_type == "negative":
                st.error(insight_text)
            else:
                st.info(insight_text)

    with col2:
        st.markdown("#### 🎯 Coaching Recommendations")

        recommendations = []

        # FIX: pd.qcut raises ValueError when there are too few rows or too
        # many duplicate values to form 3 distinct buckets; skip the
        # tier-based recommendations instead of crashing the page.
        try:
            df['performance_tier'] = pd.qcut(df['avg_plus_minus'], q=3, labels=['Low', 'Medium', 'High'])
            df['usage_tier'] = pd.qcut(df['total_minutes'], q=3, labels=['Low', 'Medium', 'High'])
            tiers_available = True
        except ValueError:
            tiers_available = False

        if tiers_available:
            # High performers that play few minutes, and vice versa.
            underused_performers = df[
                (df['performance_tier'] == 'High') &
                (df['usage_tier'] == 'Low')
            ]
            overused_underperformers = df[
                (df['performance_tier'] == 'Low') &
                (df['usage_tier'] == 'High')
            ]

            if not underused_performers.empty:
                recommendations.append("📈 Consider increasing minutes for high-performing lineups")

            if not overused_underperformers.empty:
                recommendations.append("📉 Reduce usage of underperforming rotations")

        # Defensive analysis
        good_defense = df[df['avg_defensive_rating'] < df['avg_defensive_rating'].median()]
        if not good_defense.empty:
            recommendations.append("🛡️ Utilize best defensive lineups in key moments")

        # Offensive analysis
        good_offense = df[df['avg_offensive_rating'] > df['avg_offensive_rating'].median()]
        if not good_offense.empty:
            recommendations.append("⚡ Deploy top offensive lineups when trailing")

        # General, always-applicable recommendations.
        recommendations.extend([
            "🔄 Experiment with new combinations in low-stakes games",
            "📊 Monitor +/- trends throughout the season",
            "🎯 Focus on lineups that will play in playoffs"
        ])

        for i, rec in enumerate(recommendations[:6], 1):  # Limit to 6 recommendations
            st.markdown(f"{i}. {rec}")


def render_lineup_optimizer():
    """Render interactive lineup optimization tools.

    Lets the coach pick five players (live roster when available, otherwise
    a static fallback list), shows mock predictions, and offers a
    save-scenario form. Predictions are illustrative only: simple heuristics
    plus np.random noise, not a model.
    """
    st.subheader("🔧 Lineup Optimizer & Scenario Testing")

    with st.expander("🎯 Build Custom Lineup", expanded=False):
        col1, col2 = st.columns([3, 2])

        with col1:
            st.markdown("#### Player Selection")

            # Prefer the live roster; fall back to a static list when the
            # API payload is missing or has no 'roster' key.
            roster_data = load_team_roster(DEFAULT_TEAM_ID)

            if roster_data and api.safe_get(roster_data, 'roster'):
                available_players = []
                for player in roster_data['roster']:
                    name = f"{player.get('first_name', 'Unknown')} {player.get('last_name', 'Player')}"
                    position = player.get('position', 'N/A')
                    available_players.append(f"{name} ({position})")

                selected_players = st.multiselect(
                    "Select 5 Players:",
                    options=available_players,
                    max_selections=5,
                    help="Choose exactly 5 players for the lineup"
                )

                # Progress feedback while the lineup is incomplete.
                if len(selected_players) == 5:
                    st.success("✅ Valid lineup selected!")
                elif len(selected_players) > 0:
                    st.info(f"📝 {len(selected_players)}/5 players selected")

            else:
                # Fallback player list
                fallback_players = [
                    "Kevin Durant (SF)", "Kyrie Irving (PG)", "Ben Simmons (PF)",
                    "Nic Claxton (C)", "Cam Johnson (SG)", "Mikal Bridges (SF)",
                    "Dorian Finney-Smith (PF)", "Day'Ron Sharpe (C)",
                    "Dennis Smith Jr. (PG)", "Royce O'Neale (SF)"
                ]

                selected_players = st.multiselect(
                    "Select 5 Players:",
                    options=fallback_players,
                    max_selections=5,
                    help="Choose exactly 5 players for the lineup"
                )

        with col2:
            st.markdown("#### Lineup Prediction")

            if len(selected_players) == 5:
                # Generate mock predictions based on player selection.
                # In a real system, this would use ML models or analytics.
                base_rating = 110  # Starting point

                # Reward star power (simplified name match).
                star_players = sum(1 for p in selected_players if any(star in p for star in ["Durant", "Irving"]))
                base_rating += star_players * 4

                # Reward positional diversity: "Name (POS)" -> "POS".
                positions = [p.split('(')[-1].strip(')') for p in selected_players]
                if len(set(positions)) >= 4:  # Good position diversity
                    base_rating += 2

                predicted_plus_minus = round(base_rating - 110 + np.random.normal(0, 2), 1)
                predicted_off_rating = round(base_rating + np.random.normal(0, 3), 1)
                predicted_def_rating = round(110 - (base_rating - 110) * 0.3 + np.random.normal(0, 2), 1)

                # Display predictions
                st.metric("Predicted +/-", f"{predicted_plus_minus:+.1f}")
                st.metric("Predicted Off Rating", f"{predicted_off_rating:.1f}")
                st.metric("Predicted Def Rating", f"{predicted_def_rating:.1f}")

                # Lineup assessment
                if predicted_plus_minus > 5:
                    st.success("🔥 High-potential lineup!")
                elif predicted_plus_minus > 0:
                    st.info("📊 Solid lineup combination")
                else:
                    st.warning("⚠️ May need adjustments")

            else:
                st.info("👆 Select exactly 5 players to see predictions")

                # Show lineup building tips
                st.markdown("##### 💡 Lineup Building Tips")
                tips = [
                    "Balance scoring and defense",
                    "Consider position versatility",
                    "Mix veterans with young players",
                    "Account for chemistry and fit"
                ]
                for tip in tips:
                    st.caption(f"• {tip}")

        # Save lineup scenario
        # NOTE(review): original indentation was lost in extraction — this
        # section is assumed to sit inside the expander; confirm placement.
        st.markdown("---")

        col1, col2, col3 = st.columns([1, 2, 1])

        with col2:
            scenario_name = st.text_input(
                "Scenario Name:",
                placeholder="e.g., Closing Lineup, Small Ball, etc."
            )

            if st.button("💾 Save Lineup Scenario", type="primary", use_container_width=True):
                if len(selected_players) == 5 and scenario_name.strip():
                    # In a real app, this would save to database
                    st.success(f"✅ Saved '{scenario_name}' lineup scenario!")
                    st.balloons()
                else:
                    st.error("Please provide a scenario name and select exactly 5 players.")
+
{category}
+

{count} lineups • Avg +/-: {avg_rating:+.1f}

+
def main():
    """Entry point: orchestrate filters, data loading, and all page sections."""
    try:
        render_page_header()
        st.markdown("---")

        filter_result = render_analysis_filters()
        if not filter_result:
            st.error("Unable to load filter options. Please check system connectivity.")
            return

        team_id, team_name, min_games, season, sort_by, sort_by_display = filter_result

        st.markdown("---")

        # Fetch the lineup analysis for the chosen parameters.
        with st.spinner(f"🔍 Analyzing lineup effectiveness for {team_name}..."):
            lineup_data = load_lineup_analysis(team_id, min_games, season, sort_by)

        if lineup_data and api.safe_get(lineup_data, 'lineup_effectiveness'):
            df = pd.DataFrame(lineup_data['lineup_effectiveness'])

            if df.empty:
                st.warning(f"""
                ⚠️ No lineup data found for **{team_name}** with current filters.

                **Try adjusting your parameters:**
                - Lower the minimum games requirement ({min_games} currently)
                - Remove the season filter ({season} currently)
                - Check if the team has sufficient game data
                """)
                return
        else:
            # Demo fallback when the API is unreachable.
            st.info("📊 Using sample data - API connection unavailable")
            df = generate_sample_lineup_data()

        # Page sections, each separated by a horizontal rule.
        render_lineup_effectiveness_table(df, sort_by_display)

        st.markdown("---")
        render_performance_visualizations(df)

        st.markdown("---")
        render_insights_and_recommendations(df, team_name)

        st.markdown("---")
        render_rotation_analysis(df)

        st.markdown("---")
        render_lineup_optimizer()

        # Footer with analysis details
        st.markdown("---")

        footer_items = (
            f"**Team:** {team_name}",
            f"**Minimum Games:** {min_games}",
            f"**Sorted by:** {sort_by_display}",
        )
        for column, caption in zip(st.columns(3), footer_items):
            with column:
                st.caption(caption)

        st.caption(f"*Analysis generated on {datetime.now().strftime('%B %d, %Y at %I:%M %p')}*")

    except Exception as e:
        st.error("An error occurred while loading the lineup analysis.")
        logger.error(f"Lineup analysis error: {e}")

        if st.button("🔄 Retry Analysis"):
            st.cache_data.clear()
            st.rerun()


if __name__ == "__main__":
    main()
b/app/src/pages/33_Season_Summaries.py new file mode 100644 index 0000000000..9c75c203cd --- /dev/null +++ b/app/src/pages/33_Season_Summaries.py @@ -0,0 +1,888 @@ +################################################## +# BallWatch Basketball Analytics - Season Performance Summaries +# Comprehensive season tracking and goal progress monitoring +# +# User Story: Marcus-3.6 - Season performance summaries and goal tracking +# +# Features: +# - Season overview with key metrics +# - Monthly performance trends +# - Goal progress tracking +# - League comparison and rankings +# - Playoff projection analysis +################################################## + +import logging +import streamlit as st +import requests +import pandas as pd +import plotly.express as px +import plotly.graph_objects as go +from datetime import datetime, timedelta +import numpy as np +from modules.nav import SideBarLinks, check_authentication + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Page configuration +st.set_page_config( + page_title="BallWatch - Season Summary", + layout="wide" +) + +# Check authentication and role +check_authentication('head_coach') +SideBarLinks() + +# Constants +BASE_URL = "http://api:4000" +CURRENT_SEASON = "2024-25" +DEFAULT_TEAM_ID = st.session_state.get('team_id', 1) +COACH_NAME = st.session_state.get('first_name', 'Coach') + +# Styling +st.markdown(""" + +""", unsafe_allow_html=True) + + +class SeasonAnalysisAPI: + """API client for season analysis with comprehensive error handling.""" + + @staticmethod + def make_request(endpoint, params=None, timeout=15): + """Make API request with error handling.""" + try: + url = f"{BASE_URL}{endpoint}" + response = requests.get(url, params=params, timeout=timeout) + + if response.status_code == 200: + return response.json() + else: + logger.warning(f"API request failed: {response.status_code}") + return None + + except requests.exceptions.RequestException as e: + 
            logger.error(f"API request error: {e}")
            return None

    @staticmethod
    def safe_get(data, *keys, default=None):
        """Safely extract nested values.

        Walks ``data`` one key/index at a time and returns ``default`` when
        any step is missing or the current value is not subscriptable.
        """
        try:
            for key in keys:
                data = data[key]
            return data
        except (KeyError, TypeError, AttributeError):
            return default


# Initialize API client (stateless wrapper shared by the loaders below)
api = SeasonAnalysisAPI()


@st.cache_data(ttl=600, show_spinner=False)
def load_season_summary(team_id, season=CURRENT_SEASON):
    """Load comprehensive season summary data.

    Cached for 10 minutes; returns the parsed JSON dict, or None when the
    request fails (make_request swallows errors and returns None).
    """
    params = {
        "entity_type": "team",
        "entity_id": team_id,
        "season": season,
        "include_trends": "true",
        "include_goals": "true"
    }
    return api.make_request("/api/analytics/season-summaries", params=params)


@st.cache_data(ttl=300, show_spinner=False)
def load_team_info(team_id):
    """Load basic team information (cached for 5 minutes; None on failure)."""
    return api.make_request(f"/api/teams/{team_id}")


def generate_sample_season_data():
    """Generate realistic sample season data for demonstration."""
    # Only the 'summary' key is populated; presumably this mirrors the
    # /api/analytics/season-summaries payload — verify against the API.
    return {
        'summary': {
            'team_name': 'Brooklyn Nets',
            'conference': 'Eastern',
            'division': 'Atlantic',
            'coach': 'Marcus Thompson',
            'games_played': 41,
            'wins': 24,
            'losses': 17,
            'avg_points_scored': 114.2,
            'avg_points_allowed': 110.8,
            'home_games': 21,
            'away_games': 20,
            'win_percentage': 58.5,
            'net_rating': 3.4,
            'games_remaining': 41
        }
    }


def generate_monthly_performance_data():
    """Generate month-by-month performance data.

    NOTE(review): uses np.random without a fixed seed, so ppg/opp_ppg and
    net_rating differ between reruns — confirm this jitter is intentional
    for the demo charts.
    """
    months = ['Oct', 'Nov', 'Dec', 'Jan', 'Feb', 'Mar']
    data = []

    for i, month in enumerate(months):
        if i < 4:  # Only show completed months
            # Fixed win/game counts per completed month; scoring gets noise.
            wins = [3, 8, 6, 7][i]
            games = [5, 12, 9, 10][i]
            losses = games - wins
            ppg = 110 + (i * 2) + np.random.normal(0, 2)
            opp_ppg = 108 + (i * 1.5) + np.random.normal(0, 2)

            data.append({
                'month': month,
                'games': games,
                'wins': wins,
                'losses': losses,
                'win_pct': wins / games,
                'ppg': round(ppg, 1),
                'opp_ppg': round(opp_ppg, 1),
                'net_rating': round(ppg - opp_ppg, 1)
            })

    # (statement continues on the next source line: `pd.DataFrame(data)`)
    return
pd.DataFrame(data) + + +def render_page_header(): + """Render the page header with season selector.""" + col1, col2, col3 = st.columns([2, 1, 1]) + + with col1: + st.title("📈 Season Performance Summary") + st.markdown("*Track progress and analyze season trends*") + + with col2: + selected_season = st.selectbox( + "Season:", + options=["2024-25", "2023-24", "2022-23"], + index=0, + help="Choose which season to analyze" + ) + + with col3: + if st.button("🔄 Refresh Summary", help="Refresh season data"): + st.cache_data.clear() + st.rerun() + + return selected_season + + +def render_season_overview(season_data, season): + """Render comprehensive season overview.""" + st.subheader("🏀 Season Overview") + + if not season_data: + season_data = generate_sample_season_data() + + summary = api.safe_get(season_data, 'summary', {}) + + # Main overview card + team_name = summary.get('team_name', 'Brooklyn Nets') + wins = summary.get('wins', 24) + losses = summary.get('losses', 17) + games_played = summary.get('games_played', 41) + win_pct = (wins / games_played * 100) if games_played > 0 else 0 + + st.markdown(f""" +
+

🏀 {team_name} - {season} Season

+

{wins}-{losses}

+

{win_pct:.1f}% Win Percentage

+

{summary.get('games_remaining', 41)} games remaining in regular season

+
+ """, unsafe_allow_html=True) + + # Detailed metrics + col1, col2, col3, col4, col5 = st.columns(5) + + metrics = [ + ("Points/Game", summary.get('avg_points_scored', 114.2), "⚡"), + ("Opp Points/Game", summary.get('avg_points_allowed', 110.8), "🛡️"), + ("Net Rating", summary.get('net_rating', 3.4), "📊"), + ("Home Record", f"{summary.get('home_wins', 13)}-{summary.get('home_losses', 8)}", "🏠"), + ("Road Record", f"{summary.get('away_wins', 11)}-{summary.get('away_losses', 9)}", "✈️") + ] + + columns = [col1, col2, col3, col4, col5] + + for i, (label, value, emoji) in enumerate(metrics): + with columns[i]: + if isinstance(value, (int, float)) and label != "Net Rating": + st.metric(f"{emoji} {label}", f"{value:.1f}") + else: + st.metric(f"{emoji} {label}", str(value)) + + +def render_monthly_trends(): + """Render month-by-month performance trends.""" + st.subheader("📅 Monthly Performance Trends") + + # Generate or load monthly data + monthly_df = generate_monthly_performance_data() + + col1, col2 = st.columns(2) + + with col1: + # Monthly record chart + fig_record = go.Figure() + + fig_record.add_trace(go.Bar( + name='Wins', + x=monthly_df['month'], + y=monthly_df['wins'], + marker_color='#28a745', + text=monthly_df['wins'], + textposition='auto' + )) + + fig_record.add_trace(go.Bar( + name='Losses', + x=monthly_df['month'], + y=monthly_df['losses'], + marker_color='#dc3545', + text=monthly_df['losses'], + textposition='auto' + )) + + fig_record.update_layout( + title='Monthly Wins & Losses', + xaxis_title='Month', + yaxis_title='Games', + barmode='stack', + height=400, + showlegend=True + ) + + st.plotly_chart(fig_record, use_container_width=True) + + with col2: + # Net rating trend + fig_rating = px.line( + monthly_df, + x='month', + y='net_rating', + title='Monthly Net Rating Trend', + labels={'net_rating': 'Net Rating', 'month': 'Month'}, + line_shape='spline', + markers=True + ) + + fig_rating.update_traces( + line=dict(width=4, color='#007bff'), + 
marker=dict(size=10) + ) + + # Add horizontal line at 0 + fig_rating.add_hline(y=0, line_dash="dash", line_color="gray", opacity=0.7) + + fig_rating.update_layout(height=400) + + st.plotly_chart(fig_rating, use_container_width=True) + + +def render_goals_progress(): + """Render progress toward season goals.""" + st.subheader("🎯 Season Goals Progress") + + # Mock goals data with realistic targets + goals = { + 'playoff_berth': { + 'target': 'Make Playoffs', + 'current_status': 'On Track', + 'probability': 0.72, + 'description': 'Currently 6th seed with 41 games remaining' + }, + 'wins_target': { + 'target': 45, + 'current': 24, + 'games_played': 41, + 'pace': round((24/41) * 82, 1), + 'description': 'On pace for 48 wins this season' + }, + 'development_goals': [ + { + 'goal': 'Improve Defensive Rating', + 'target': 108.0, + 'current': 110.8, + 'status': 'needs_work' + }, + { + 'goal': 'Increase Three-Point %', + 'target': 38.0, + 'current': 36.5, + 'status': 'improving' + }, + { + 'goal': 'Reduce Turnovers', + 'target': 12.0, + 'current': 13.4, + 'status': 'needs_work' + }, + { + 'goal': 'Improve Bench Scoring', + 'target': 35.0, + 'current': 32.1, + 'status': 'on_track' + } + ] + } + + col1, col2, col3 = st.columns(3) + + with col1: + st.markdown("#### 🏆 Primary Goals") + + # Playoff probability + playoff_data = goals['playoff_berth'] + prob = playoff_data['probability'] + + if prob > 0.7: + prob_color = "achievement-card" + prob_icon = "🟢" + elif prob > 0.4: + prob_color = "warning-card" + prob_icon = "🟡" + else: + prob_color = "danger-card" + prob_icon = "🔴" + + st.markdown(f""" +
+
{prob_icon} Playoff Chances
+

{prob:.1%}

+

{playoff_data['description']}

+
+ """, unsafe_allow_html=True) + + # Wins target + wins_data = goals['wins_target'] + wins_pace = wins_data['pace'] + wins_target = wins_data['target'] + + if wins_pace >= wins_target: + wins_color = "achievement-card" + wins_icon = "🎯" + elif wins_pace >= wins_target - 3: + wins_color = "warning-card" + wins_icon = "📊" + else: + wins_color = "danger-card" + wins_icon = "📉" + + st.markdown(f""" +
+
{wins_icon} Win Target Progress
+

{wins_data['current']}/{wins_target} wins

+

Pace: {wins_pace} wins

+

{wins_data['description']}

+
+ """, unsafe_allow_html=True) + + with col2: + st.markdown("#### 📈 Development Areas") + + for goal in goals['development_goals']: + status = goal['status'] + + if status == 'on_track': + card_class = "achievement-card" + icon = "✅" + elif status == 'improving': + card_class = "warning-card" + icon = "📈" + else: + card_class = "danger-card" + icon = "❌" + + if goal['goal'] == 'Increase Three-Point %': + target_display = f"{goal['target']:.1f}%" + current_display = f"{goal['current']:.1f}%" + else: + target_display = f"{goal['target']:.1f}" + current_display = f"{goal['current']:.1f}" + + st.markdown(f""" +
+

{icon} {goal['goal']}

+

Target: {target_display} | Current: {current_display}

+
+ """, unsafe_allow_html=True) + + with col3: + st.markdown("#### ⭐ Key Milestones") + + milestones = [ + {"milestone": "Reach .500 record", "status": "achieved", "progress": 1.0}, + {"milestone": "Win 30 games", "status": "pending", "progress": 24/30}, + {"milestone": "Top 6 seed", "status": "on_track", "progress": 0.8}, + {"milestone": "Home court advantage", "status": "pending", "progress": 0.6} + ] + + for milestone in milestones: + status = milestone['status'] + progress = milestone['progress'] + + if status == 'achieved': + icon = "✅" + color = "#28a745" + elif status == 'on_track': + icon = "🟢" + color = "#ffc107" + else: + icon = "⏳" + color = "#6c757d" + + st.markdown(f"**{icon} {milestone['milestone']}**") + + if progress < 1.0: + st.progress(progress) + st.caption(f"{progress:.1%} complete") + else: + st.caption("✅ Completed!") + + +def render_detailed_statistics(season_data): + """Render detailed season statistics breakdown.""" + st.subheader("📊 Detailed Season Statistics") + + summary = api.safe_get(season_data, 'summary', {}) if season_data else generate_sample_season_data()['summary'] + + col1, col2, col3 = st.columns(3) + + with col1: + st.markdown("#### ⚡ Offensive Statistics") + + # Calculate or use default offensive stats + ppg = summary.get('avg_points_scored', 114.2) + fg_pct = 47.1 # Default FG% + three_pct = 36.5 # Default 3P% + ft_pct = 79.8 # Default FT% + apg = 26.8 # Default assists + + offensive_stats = [ + ("Points Per Game", f"{ppg:.1f}", "Primary scoring output"), + ("Field Goal %", f"{fg_pct:.1f}%", "Overall shooting efficiency"), + ("Three-Point %", f"{three_pct:.1f}%", "Perimeter shooting"), + ("Free Throw %", f"{ft_pct:.1f}%", "Charity stripe efficiency"), + ("Assists Per Game", f"{apg:.1f}", "Ball movement and sharing") + ] + + for stat, value, description in offensive_stats: + col_a, col_b = st.columns([2, 1]) + with col_a: + st.write(f"**{stat}:**") + st.caption(description) + with col_b: + st.write(f"**{value}**") + + with 
col2: + st.markdown("#### 🛡️ Defensive Statistics") + + opp_ppg = summary.get('avg_points_allowed', 110.8) + opp_fg_pct = 45.2 # Default opponent FG% + steals = 7.9 # Default steals + blocks = 5.2 # Default blocks + def_reb = 34.1 # Default defensive rebounds + + defensive_stats = [ + ("Opponent Points/Game", f"{opp_ppg:.1f}", "Points allowed"), + ("Opponent FG %", f"{opp_fg_pct:.1f}%", "Opponent shooting %"), + ("Steals Per Game", f"{steals:.1f}", "Defensive pressure"), + ("Blocks Per Game", f"{blocks:.1f}", "Rim protection"), + ("Defensive Rebounds", f"{def_reb:.1f}", "Securing possessions") + ] + + for stat, value, description in defensive_stats: + col_a, col_b = st.columns([2, 1]) + with col_a: + st.write(f"**{stat}:**") + st.caption(description) + with col_b: + st.write(f"**{value}**") + + with col3: + st.markdown("#### 📊 Advanced Metrics") + + net_rating = summary.get('net_rating', 3.4) + pace = 98.5 # Default pace + eff_fg_pct = 51.2 # Default effective FG% + ts_pct = 57.8 # Default true shooting % + turnover_pct = 13.4 # Default turnover rate + + advanced_stats = [ + ("Net Rating", f"{net_rating:+.1f}", "Point differential per 100 poss"), + ("Pace", f"{pace:.1f}", "Possessions per 48 minutes"), + ("Effective FG%", f"{eff_fg_pct:.1f}%", "Shooting efficiency w/ 3P weight"), + ("True Shooting %", f"{ts_pct:.1f}%", "Overall shooting efficiency"), + ("Turnover Rate", f"{turnover_pct:.1f}%", "Turnovers per possession") + ] + + for stat, value, description in advanced_stats: + col_a, col_b = st.columns([2, 1]) + with col_a: + st.write(f"**{stat}:**") + st.caption(description) + with col_b: + st.write(f"**{value}**") + + +def render_conference_standings(): + """Render Eastern Conference standings context.""" + st.subheader("🏆 Eastern Conference Standings") + + # Mock current standings + standings = [ + {"rank": 1, "team": "Boston Celtics", "record": "32-9", "gb": "-", "streak": "W3"}, + {"rank": 2, "team": "Miami Heat", "record": "28-13", "gb": "4.0", "streak": 
"L1"}, + {"rank": 3, "team": "Philadelphia 76ers", "record": "26-15", "gb": "6.0", "streak": "W2"}, + {"rank": 4, "team": "New York Knicks", "record": "25-16", "gb": "7.0", "streak": "W1"}, + {"rank": 5, "team": "Orlando Magic", "record": "25-16", "gb": "7.0", "streak": "L2"}, + {"rank": 6, "team": "Brooklyn Nets", "record": "24-17", "gb": "8.0", "streak": "W1"}, + {"rank": 7, "team": "Atlanta Hawks", "record": "22-19", "gb": "10.0", "streak": "L1"}, + {"rank": 8, "team": "Chicago Bulls", "record": "21-20", "gb": "11.0", "streak": "W2"}, + {"rank": 9, "team": "Toronto Raptors", "record": "20-21", "gb": "12.0", "streak": "L3"}, + {"rank": 10, "team": "Indiana Pacers", "record": "19-22", "gb": "13.0", "streak": "W1"} + ] + + col1, col2 = st.columns([2, 1]) + + with col1: + st.markdown("#### Current Standings") + + # Create standings table + standings_html = """ + + + + + + + + + + + + """ + + for team in standings[:10]: + row_class = "standings-highlight" if "Brooklyn Nets" in team['team'] else "" + standings_html += f""" + + + + + + + + """ + + standings_html += "
RankTeamRecordGBStreak
{team['rank']}{team['team']}{team['record']}{team['gb']}{team['streak']}
" + st.markdown(standings_html, unsafe_allow_html=True) + + with col2: + st.markdown("#### 🎯 Playoff Picture") + + # Playoff positioning info + current_seed = 6 + games_back = 8.0 + + st.markdown(f""" +
+
Current Position
+

#{current_seed} Seed

+

{games_back} games back of 1st

+
+

Playoff Status: In Position

+

Play-in Status: Avoided

+
+ """, unsafe_allow_html=True) + + # Critical upcoming games + st.markdown("#### 🔥 Critical Games") + critical_games = [ + "vs Boston (1st seed)", + "@ Miami (2nd seed)", + "vs Philadelphia (3rd seed)" + ] + + for game in critical_games: + st.markdown(f"🔴 **{game}**") + + +def render_upcoming_challenges(): + """Render analysis of upcoming schedule challenges.""" + st.subheader("🗓️ Upcoming Schedule Analysis") + + col1, col2 = st.columns(2) + + with col1: + st.markdown("#### 📅 Next 10 Games Difficulty") + + # Mock upcoming schedule with difficulty ratings + upcoming_schedule = [ + {"date": "Jan 15", "opponent": "Boston Celtics", "location": "vs", "difficulty": "Hard"}, + {"date": "Jan 17", "opponent": "Miami Heat", "location": "@", "difficulty": "Hard"}, + {"date": "Jan 19", "opponent": "Philadelphia 76ers", "location": "vs", "difficulty": "Medium"}, + {"date": "Jan 21", "opponent": "Charlotte Hornets", "location": "@", "difficulty": "Easy"}, + {"date": "Jan 23", "opponent": "Detroit Pistons", "location": "vs", "difficulty": "Easy"}, + {"date": "Jan 25", "opponent": "Orlando Magic", "location": "@", "difficulty": "Medium"}, + {"date": "Jan 27", "opponent": "Atlanta Hawks", "location": "vs", "difficulty": "Medium"}, + {"date": "Jan 29", "opponent": "Toronto Raptors", "location": "@", "difficulty": "Easy"}, + {"date": "Jan 31", "opponent": "Chicago Bulls", "location": "vs", "difficulty": "Medium"}, + {"date": "Feb 2", "opponent": "Indiana Pacers", "location": "@", "difficulty": "Easy"} + ] + + hard_games = medium_games = easy_games = 0 + + for game in upcoming_schedule: + difficulty = game['difficulty'] + location_icon = "🏠" if game['location'] == "vs" else "✈️" + + if difficulty == "Hard": + color = "🔴" + hard_games += 1 + elif difficulty == "Medium": + color = "🟡" + medium_games += 1 + else: + color = "🟢" + easy_games += 1 + + st.markdown(f"{color} {location_icon} **{game['date']}** - {game['opponent']}") + + # Schedule difficulty summary + st.markdown("---") + 
st.markdown(f""" + **Schedule Difficulty Breakdown:** + - 🔴 Hard: {hard_games} games + - 🟡 Medium: {medium_games} games + - 🟢 Easy: {easy_games} games + """) + + with col2: + st.markdown("#### 🎯 Keys to Success") + + success_factors = [ + { + "factor": "Stay healthy through tough stretch", + "importance": "Critical", + "note": "Avoid injuries in physical games" + }, + { + "factor": "Improve road performance", + "importance": "High", + "note": "Currently 11-9 on the road" + }, + { + "factor": "Win games vs bottom teams", + "importance": "High", + "note": "Must beat Hornets, Pistons, Raptors" + }, + { + "factor": "Split vs top East teams", + "importance": "Medium", + "note": "Go 2-2 vs Celtics, Heat, 76ers" + }, + { + "factor": "Strengthen bench production", + "importance": "Medium", + "note": "Bench needs to step up in big games" + } + ] + + for i, factor in enumerate(success_factors, 1): + importance = factor['importance'] + + if importance == "Critical": + icon = "🔴" + elif importance == "High": + icon = "🟡" + else: + icon = "🔵" + + st.markdown(f"**{i}. 
{icon} {factor['factor']}**") + st.caption(factor['note']) + st.write("") + + +def render_season_projections(): + """Render season projections and scenarios.""" + st.subheader("🔮 Season Projections") + + col1, col2 = st.columns(2) + + with col1: + st.markdown("#### 📈 Win Projections") + + # Current pace calculation + current_wins = 24 + games_played = 41 + current_pace = (current_wins / games_played) * 82 + + # Scenario projections + scenarios = [ + {"name": "Current Pace", "wins": round(current_pace), "probability": "50%", "description": "Maintain current performance"}, + {"name": "Optimistic", "wins": 50, "probability": "25%", "description": "Key players stay healthy, chemistry improves"}, + {"name": "Pessimistic", "wins": 42, "probability": "25%", "description": "Injuries or chemistry issues"} + ] + + for scenario in scenarios: + wins = scenario['wins'] + prob = scenario['probability'] + + if wins >= 47: + color = "achievement-card" + elif wins >= 44: + color = "warning-card" + else: + color = "danger-card" + + st.markdown(f""" +
+
{scenario['name']} Scenario
+

{wins} wins ({prob})

+

{scenario['description']}

+
+ """, unsafe_allow_html=True) + + with col2: + st.markdown("#### 🏆 Playoff Scenarios") + + # Playoff probability breakdown + playoff_scenarios = [ + {"seed": "1-2 seed", "probability": 5, "description": "Need significant improvement"}, + {"seed": "3-4 seed", "probability": 15, "description": "Requires strong finish"}, + {"seed": "5-6 seed", "probability": 45, "description": "Most likely scenario"}, + {"seed": "7-8 seed", "probability": 25, "description": "Play-in tournament"}, + {"seed": "Miss playoffs", "probability": 10, "description": "Significant decline needed"} + ] + + # Create probability chart + seed_names = [s['seed'] for s in playoff_scenarios] + probabilities = [s['probability'] for s in playoff_scenarios] + + fig_playoff = px.pie( + values=probabilities, + names=seed_names, + title='Playoff Seeding Probabilities', + color_discrete_sequence=px.colors.qualitative.Set3 + ) + + fig_playoff.update_traces( + textposition='inside', + textinfo='percent+label', + hovertemplate="%{label}
Probability: %{percent}" + ) + + fig_playoff.update_layout(height=400) + st.plotly_chart(fig_playoff, use_container_width=True) + + +def main(): + """Main application function.""" + try: + # Render page header and get season + selected_season = render_page_header() + st.markdown("---") + + # Load season data + with st.spinner(f"📊 Loading {selected_season} season summary..."): + season_data = load_season_summary(DEFAULT_TEAM_ID, selected_season) + + # Render all analysis sections + render_season_overview(season_data, selected_season) + + st.markdown("---") + render_monthly_trends() + + st.markdown("---") + render_goals_progress() + + st.markdown("---") + render_detailed_statistics(season_data) + + st.markdown("---") + render_conference_standings() + + st.markdown("---") + render_upcoming_challenges() + + st.markdown("---") + render_season_projections() + + # Footer with timestamp + st.markdown("---") + + col1, col2, col3 = st.columns(3) + + with col1: + st.caption(f"**Season:** {selected_season}") + + with col2: + st.caption(f"**Team:** Brooklyn Nets") + + with col3: + st.caption(f"**Updated:** {datetime.now().strftime('%B %d, %Y')}") + + except Exception as e: + st.error("An error occurred while loading the season summary.") + logger.error(f"Season summary error: {e}") + + if st.button("🔄 Retry"): + st.cache_data.clear() + st.rerun() + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/api/backend/ml_models/__init__.py b/app/src/pages/34_Player_Matchups.py similarity index 100% rename from api/backend/ml_models/__init__.py rename to app/src/pages/34_Player_Matchups.py diff --git a/app/src/pages/40_General_Manager_Home.py b/app/src/pages/40_General_Manager_Home.py new file mode 100644 index 0000000000..6b1dd6723a --- /dev/null +++ b/app/src/pages/40_General_Manager_Home.py @@ -0,0 +1,28 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks + 
+st.set_page_config(layout='wide') +SideBarLinks() + +st.title(f"Welcome General Manager, {st.session_state.get('first_name', 'Guest')}.") +st.write('') +st.write('') +st.write('### What would you like to do today?') + +if st.button('Track Player Progress', + type='primary', + use_container_width=True): + st.switch_page('pages/41_Player_Progress.py') + +if st.button('Updated Draft Rankings', + type='primary', + use_container_width=True): + st.switch_page('pages/42_Draft_Rankings.py') + +if st.button('Free Agent Simulation', + type='primary', + use_container_width=True): + st.switch_page('pages/43_Contract_Efficiency.py') \ No newline at end of file diff --git a/app/src/pages/41_Player_Progress.py b/app/src/pages/41_Player_Progress.py new file mode 100644 index 0000000000..1a69f04576 --- /dev/null +++ b/app/src/pages/41_Player_Progress.py @@ -0,0 +1,280 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +import pandas as pd +import plotly.express as px +import plotly.graph_objects as go +import requests +from datetime import datetime +from modules.nav import SideBarLinks + +st.set_page_config(layout='wide') +SideBarLinks() +st.title('Player Progress & Development') + +BASE_URL = "http://api:4000" + +def make_request(endpoint, method='GET', data=None): + try: + url = f"{BASE_URL}{endpoint}" + if method == 'GET': + response = requests.get(url) + elif method == 'PUT': + response = requests.put(url, json=data) + + if response.status_code in [200, 201]: + return response.json() + else: + st.error(f"API Error: {response.status_code} - {response.text}") + return None + except Exception as e: + st.error(f"Connection Error: {str(e)}") + return None + +# Load data +if st.button("Load Player Data"): + players_data = make_request("/basketball/players") + if players_data and 'players' in players_data: + st.session_state['players'] = players_data['players'] + st.success(f"Loaded {len(players_data['players'])} players") + + # Also load evaluations 
for development insights + eval_data = make_request("/strategy/draft-evaluations") + if eval_data and 'evaluations' in eval_data: + st.session_state['evaluations'] = eval_data['evaluations'] + +# Main content +if 'players' in st.session_state or 'evaluations' in st.session_state: + tab1, tab2, tab3 = st.tabs(["Progress Overview", "Individual Tracking", "Development Plans"]) + + with tab1: + st.header("Player Progress Overview") + + # Use evaluations data if available, otherwise use players data + if 'evaluations' in st.session_state: + evaluations = st.session_state['evaluations'] + df = pd.DataFrame(evaluations) + + # Create progress metrics + if not df.empty and 'overall_rating' in df.columns and 'potential_rating' in df.columns: + # Convert rating columns to numeric + df['overall_rating'] = pd.to_numeric(df['overall_rating'], errors='coerce').fillna(50) + df['potential_rating'] = pd.to_numeric(df['potential_rating'], errors='coerce').fillna(50) + df['offensive_rating'] = pd.to_numeric(df.get('offensive_rating', 50), errors='coerce').fillna(50) + df['defensive_rating'] = pd.to_numeric(df.get('defensive_rating', 50), errors='coerce').fillna(50) + + df['progress_score'] = df['potential_rating'] - df['overall_rating'] + + col1, col2, col3, col4 = st.columns(4) + + with col1: + st.metric("Total Players", len(df)) + + with col2: + high_potential = len(df[df['potential_rating'] >= 85]) + st.metric("High Potential", high_potential) + + with col3: + avg_rating = df['overall_rating'].mean() + st.metric("Avg Rating", f"{avg_rating:.1f}") + + with col4: + avg_progress = df['progress_score'].mean() + st.metric("Avg Growth Room", f"{avg_progress:.1f}") + + # Progress scatter plot + if len(df) > 0: + fig = px.scatter( + df, + x='overall_rating', + y='potential_rating', + color='position' if 'position' in df.columns else None, + title="Current Rating vs Potential", + labels={ + 'overall_rating': 'Current Rating', + 'potential_rating': 'Potential Rating' + } + ) + + # Add 
diagonal reference line + fig.add_trace( + go.Scatter(x=[0, 100], y=[0, 100], + mode='lines', + line=dict(dash='dash', color='gray'), + showlegend=False, + name='Equal Line') + ) + + st.plotly_chart(fig, use_container_width=True) + + # Position breakdown + if 'position' in df.columns: + position_stats = df.groupby('position').agg({ + 'overall_rating': 'mean', + 'potential_rating': 'mean', + 'progress_score': 'mean' + }).round(1) + + fig_pos = px.bar( + position_stats, + y=position_stats.index, + x='progress_score', + title="Average Growth Potential by Position", + orientation='h' + ) + st.plotly_chart(fig_pos, use_container_width=True) + else: + st.warning("Missing rating data for progress calculations.") + + with tab2: + st.header("Individual Player Tracking") + + if 'evaluations' in st.session_state: + evaluations = st.session_state['evaluations'] + + if evaluations: + # Player selector + player_options = {} + for eval in evaluations: + display_name = f"{eval['first_name']} {eval['last_name']} ({eval.get('position', 'N/A')})" + player_options[display_name] = eval + + selected_player = st.selectbox( + "Select Player:", + options=list(player_options.keys()) + ) + + if selected_player: + player_data = player_options[selected_player] + + col1, col2 = st.columns(2) + + with col1: + st.subheader(f"{player_data['first_name']} {player_data['last_name']}") + + # Convert ratings to numeric for display + overall_rating = pd.to_numeric(player_data.get('overall_rating', 0), errors='coerce') or 0 + potential_rating = pd.to_numeric(player_data.get('potential_rating', 0), errors='coerce') or 0 + growth_room = potential_rating - overall_rating + + # Player info + st.info(f""" + **Player Profile** + - Position: {player_data.get('position', 'N/A')} + - Overall Rating: {overall_rating} + - Potential Rating: {potential_rating} + - Growth Room: {growth_room} points + """) + + # Strengths and weaknesses + if player_data.get('strengths'): + st.success(f"**Strengths:** 
{player_data['strengths']}") + + if player_data.get('weaknesses'): + st.warning(f"**Weaknesses:** {player_data['weaknesses']}") + + with col2: + # Rating breakdown radar chart + categories = ['Overall', 'Offense', 'Defense', 'Potential'] + values = [ + pd.to_numeric(player_data.get('overall_rating', 50), errors='coerce') or 50, + pd.to_numeric(player_data.get('offensive_rating', 50), errors='coerce') or 50, + pd.to_numeric(player_data.get('defensive_rating', 50), errors='coerce') or 50, + pd.to_numeric(player_data.get('potential_rating', 50), errors='coerce') or 50 + ] + + fig_radar = go.Figure() + fig_radar.add_trace(go.Scatterpolar( + r=values, + theta=categories, + fill='toself', + name='Current Ratings' + )) + + fig_radar.update_layout( + polar=dict( + radialaxis=dict(visible=True, range=[0, 100]) + ), + title="Player Rating Breakdown" + ) + st.plotly_chart(fig_radar, use_container_width=True) + + # Scout notes + if player_data.get('scout_notes'): + st.text_area("Scout Notes", value=player_data['scout_notes'], disabled=True) + + with tab3: + st.header("Development Plans") + + if 'evaluations' in st.session_state: + evaluations = st.session_state['evaluations'] + df = pd.DataFrame(evaluations) + + if not df.empty and 'overall_rating' in df.columns and 'potential_rating' in df.columns: + # Convert rating columns to numeric + df['overall_rating'] = pd.to_numeric(df['overall_rating'], errors='coerce').fillna(50) + df['potential_rating'] = pd.to_numeric(df['potential_rating'], errors='coerce').fillna(50) + + # Calculate development priority + df['growth_potential'] = df['potential_rating'] - df['overall_rating'] + df['development_priority'] = df['growth_potential'] + + # Sort by development priority + df_sorted = df.nlargest(10, 'development_priority') + + st.subheader("Top Development Priorities") + + # Development priority chart + if not df_sorted.empty: + fig_dev = px.bar( + df_sorted, + x='development_priority', + y=df_sorted.apply(lambda x: f"{x['first_name']} 
{x['last_name']}", axis=1), + orientation='h', + title="Players with Highest Development Potential", + labels={'development_priority': 'Growth Potential (Points)', 'y': 'Player'} + ) + st.plotly_chart(fig_dev, use_container_width=True) + + # Development recommendations table + st.subheader("Development Recommendations") + + if not df_sorted.empty: + dev_table = df_sorted[['first_name', 'last_name', 'position', 'overall_rating', 'potential_rating', 'growth_potential']].copy() + dev_table['recommendation'] = dev_table['growth_potential'].apply( + lambda x: 'Priority Focus' if x > 20 else 'Regular Development' if x > 10 else 'Maintain Current' + ) + + st.dataframe( + dev_table.style.background_gradient(subset=['growth_potential'], cmap='YlOrRd'), + use_container_width=True + ) + + # Development insights + st.subheader("Development Insights") + + col1, col2 = st.columns(2) + + with col1: + avg_growth = df['growth_potential'].mean() + st.metric("Average Growth Potential", f"{avg_growth:.1f} points") + + with col2: + high_growth = len(df[df['growth_potential'] > 15]) + st.metric("High Growth Players", high_growth) + else: + st.warning("Missing rating data for development analysis.") + +else: + st.info("Click 'Load Player Data' to begin tracking player progress and development.") + + # Show sample data structure + st.subheader("Expected Data Structure") + st.code(""" + Player data should include: + - Player identification (first_name, last_name, position) + - Current ratings (overall_rating, offensive_rating, defensive_rating) + - Potential rating for development tracking + - Scouting information (strengths, weaknesses, scout_notes) + """) \ No newline at end of file diff --git a/app/src/pages/42_Draft_Rankings.py b/app/src/pages/42_Draft_Rankings.py new file mode 100644 index 0000000000..f1e59db219 --- /dev/null +++ b/app/src/pages/42_Draft_Rankings.py @@ -0,0 +1,95 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +import requests +from 
modules.nav import SideBarLinks + +st.set_page_config(layout='wide') +SideBarLinks() +st.title('Draft Rankings & Player Evaluations') + +BASE_URL = "http://api:4000" + +def make_request(endpoint, method='GET', data=None): + try: + url = f"{BASE_URL}{endpoint}" + if method == 'GET': + response = requests.get(url) + elif method == 'PUT': + response = requests.put(url, json=data) + + if response.status_code in [200, 201]: + return response.json() + else: + st.error(f"API Error: {response.status_code} - {response.text}") + return None + except Exception as e: + st.error(f"Connection Error: {str(e)}") + return None + +st.header("Update Player Evaluation") + +if st.button("Load Current Rankings"): + data = make_request("/strategy/draft-evaluations") + if data and 'evaluations' in data: + st.session_state['evaluations'] = data['evaluations'] + st.success(f"Loaded {len(data['evaluations'])} player evaluations") + +if 'evaluations' in st.session_state: + evaluations = st.session_state['evaluations'] + + player_options = {} + for eval in evaluations: + display_name = f"{eval['first_name']} {eval['last_name']} ({eval['position']}) - Rating: {eval['overall_rating']}" + player_options[display_name] = eval + + selected_player = st.selectbox( + "Select Player to Update:", + options=list(player_options.keys()) + ) + + if selected_player: + current = player_options[selected_player] + + st.subheader(f"Updating: {current['first_name']} {current['last_name']}") + + # Rating sliders + col1, col2 = st.columns(2) + + with col1: + overall_rating = st.slider("Overall Rating", 0, 100, + int(float(current.get('overall_rating', 50)))) + offensive_rating = st.slider("Offensive Rating", 0, 100, + int(float(current.get('offensive_rating', 50)))) + + with col2: + defensive_rating = st.slider("Defensive Rating", 0, 100, + int(float(current.get('defensive_rating', 50)))) + potential_rating = st.slider("Potential Rating", 0, 100, + int(float(current.get('potential_rating', 50)))) + + strengths = 
st.text_area("Strengths", value=current.get('strengths', '')) + weaknesses = st.text_area("Weaknesses", value=current.get('weaknesses', '')) + scout_notes = st.text_area("Scout Notes", value=current.get('scout_notes', '')) + + if st.button("Update Evaluation", type="primary"): + update_data = { + "overall_rating": overall_rating, + "offensive_rating": offensive_rating, + "defensive_rating": defensive_rating, + "potential_rating": potential_rating, + "strengths": strengths, + "weaknesses": weaknesses, + "scout_notes": scout_notes + } + + result = make_request(f"/strategy/draft-evaluations/{current['evaluation_id']}", + method='PUT', data=update_data) + + if result: + st.success("Evaluation updated successfully!") + # Clear cached data to force reload + if 'evaluations' in st.session_state: + del st.session_state['evaluations'] + st.rerun() \ No newline at end of file diff --git a/app/src/pages/43_Contract_Efficiency.py b/app/src/pages/43_Contract_Efficiency.py new file mode 100644 index 0000000000..4d6467794a --- /dev/null +++ b/app/src/pages/43_Contract_Efficiency.py @@ -0,0 +1,402 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +import pandas as pd +import plotly.express as px +import plotly.graph_objects as go +import requests +from datetime import datetime +from modules.nav import SideBarLinks + +st.set_page_config(layout='wide') +SideBarLinks() +st.title('Contract Efficiency & Free Agency Management') + +BASE_URL = "http://api:4000" + +def make_request(endpoint, method='GET', data=None): + try: + url = f"{BASE_URL}{endpoint}" + if method == 'GET': + response = requests.get(url) + elif method == 'PUT': + response = requests.put(url, json=data) + + if response.status_code in [200, 201]: + return response.json() + else: + st.error(f"API Error: {response.status_code} - {response.text}") + return None + except Exception as e: + st.error(f"Connection Error: {str(e)}") + return None + +# Load data +if st.button("Load Free Agent 
Data"): + # Load players data + players_data = make_request("/basketball/players") + if players_data and 'players' in players_data: + st.session_state['players'] = players_data['players'] + + # Load evaluations for contract efficiency analysis + eval_data = make_request("/strategy/draft-evaluations") + if eval_data and 'evaluations' in eval_data: + st.session_state['evaluations'] = eval_data['evaluations'] + st.success(f"Loaded {len(eval_data['evaluations'])} player evaluations") + +# Salary cap settings +SALARY_CAP = 136.0 +LUXURY_TAX = 165.0 +CURRENT_PAYROLL = 118.5 +available_cap = SALARY_CAP - CURRENT_PAYROLL +luxury_room = LUXURY_TAX - CURRENT_PAYROLL + +# Main content +if 'evaluations' in st.session_state: + evaluations = st.session_state['evaluations'] + df = pd.DataFrame(evaluations) + + # Add estimated salary based on rating + if not df.empty and 'overall_rating' in df.columns: + # Convert ratings to numeric + df['overall_rating'] = pd.to_numeric(df['overall_rating'], errors='coerce').fillna(50) + df['offensive_rating'] = pd.to_numeric(df.get('offensive_rating', 50), errors='coerce').fillna(50) + df['defensive_rating'] = pd.to_numeric(df.get('defensive_rating', 50), errors='coerce').fillna(50) + df['potential_rating'] = pd.to_numeric(df.get('potential_rating', 50), errors='coerce').fillna(50) + + # Estimate salary based on rating (simplified formula) + df['estimated_salary'] = (df['overall_rating'] * 0.5).round(1) + df['value_score'] = (df['overall_rating'] / (df['estimated_salary'] + 1)).round(2) + df['contract_efficiency'] = df['value_score'].apply( + lambda x: 'Excellent' if x > 5 else ('Good' if x > 3 else ('Fair' if x > 2 else 'Poor')) + ) + + # Top metrics + col1, col2, col3, col4 = st.columns(4) + + with col1: + st.metric("Available Players", len(df)) + + with col2: + st.metric("Cap Space", f"${available_cap:.1f}M") + + with col3: + high_value = len(df[df['value_score'] >= 4]) + st.metric("High Value Players", high_value) + + with col4: + affordable 
= len(df[df['estimated_salary'] <= available_cap]) + st.metric("Affordable Options", affordable) + + # Tabs for different analyses + tab1, tab2, tab3, tab4 = st.tabs(["Contract Values", "Market Analysis", "Player Comparison", "Signing Strategy"]) + + with tab1: + st.header("Contract Value Analysis") + + col1, col2 = st.columns([2, 1]) + + with col1: + # Value scatter plot + fig = px.scatter( + df, + x='estimated_salary', + y='overall_rating', + color='value_score', + size='potential_rating', + hover_data=['first_name', 'last_name', 'position'], + title="Player Value vs Estimated Contract", + labels={ + 'estimated_salary': 'Estimated Salary ($M)', + 'overall_rating': 'Overall Rating', + 'value_score': 'Value Score' + }, + color_continuous_scale='RdYlGn' + ) + + # Add value zones + fig.add_shape( + type="rect", x0=0, y0=75, x1=available_cap, y1=100, + fillcolor="green", opacity=0.1, + line=dict(color="green", width=2, dash="dash") + ) + fig.add_annotation( + x=available_cap/2, y=85, + text="Target Zone", + showarrow=False, + font=dict(color="green", size=12) + ) + + st.plotly_chart(fig, use_container_width=True) + + with col2: + st.subheader("Best Values") + + best_values = df.nlargest(5, 'value_score') + for _, player in best_values.iterrows(): + with st.container(): + st.success(f""" + **{player['first_name']} {player['last_name']}** + - Position: {player.get('position', 'N/A')} + - Rating: {player['overall_rating']:.0f} + - Est. 
Salary: ${player['estimated_salary']:.1f}M + - Value: {player['value_score']:.2f} + """) + + # Value rankings table + st.subheader("Contract Efficiency Rankings") + + efficiency_table = df.nlargest(10, 'value_score')[ + ['first_name', 'last_name', 'position', 'overall_rating', + 'estimated_salary', 'value_score', 'contract_efficiency'] + ].copy() + + st.dataframe( + efficiency_table.style.format({ + 'estimated_salary': '${:.1f}M', + 'overall_rating': '{:.0f}', + 'value_score': '{:.2f}' + }).background_gradient(subset=['value_score'], cmap='RdYlGn'), + use_container_width=True + ) + + with tab2: + st.header("Free Agent Market Analysis") + + col1, col2 = st.columns(2) + + with col1: + # Position market analysis + if 'position' in df.columns: + position_stats = df.groupby('position').agg({ + 'overall_rating': 'mean', + 'estimated_salary': 'mean', + 'value_score': 'mean' + }).round(1) + + fig_pos = px.bar( + position_stats, + x=position_stats.index, + y='value_score', + color='overall_rating', + title="Average Value Score by Position", + labels={'index': 'Position', 'value_score': 'Average Value Score'}, + color_continuous_scale='YlOrRd' + ) + st.plotly_chart(fig_pos, use_container_width=True) + + with col2: + # Rating distribution + fig_rating = px.histogram( + df, + x='overall_rating', + nbins=15, + title="Player Rating Distribution", + labels={'overall_rating': 'Overall Rating', 'count': 'Number of Players'} + ) + st.plotly_chart(fig_rating, use_container_width=True) + + # Salary vs Rating correlation + st.subheader("Market Rate Analysis") + + fig_correlation = px.scatter( + df, + x='overall_rating', + y='estimated_salary', + color='position' if 'position' in df.columns else None, + title="Rating vs Estimated Salary", + labels={'overall_rating': 'Overall Rating', 'estimated_salary': 'Estimated Salary ($M)'} + ) + + # Add manual trend line + if len(df) > 1: + x_min, x_max = df['overall_rating'].min(), df['overall_rating'].max() + y_min, y_max = 
df['estimated_salary'].min(), df['estimated_salary'].max() + + fig_correlation.add_trace( + go.Scatter( + x=[x_min, x_max], + y=[y_min, y_max], + mode='lines', + line=dict(dash='dash', color='red'), + name='Trend', + showlegend=False + ) + ) + + st.plotly_chart(fig_correlation, use_container_width=True) + + with tab3: + st.header("Player Comparison Tool") + + # Multi-select for comparison + player_options = {} + for _, player in df.iterrows(): + display_name = f"{player['first_name']} {player['last_name']} ({player.get('position', 'N/A')})" + player_options[display_name] = player + + selected_players = st.multiselect( + "Select Players to Compare:", + options=list(player_options.keys()), + max_selections=5 + ) + + if selected_players: + comparison_data = [] + + for player_name in selected_players: + player = player_options[player_name] + comparison_data.append({ + 'Player': f"{player['first_name']} {player['last_name']}", + 'Position': player.get('position', 'N/A'), + 'Overall': player['overall_rating'], + 'Offense': player['offensive_rating'], + 'Defense': player['defensive_rating'], + 'Potential': player['potential_rating'], + 'Est. Salary': player['estimated_salary'], + 'Value Score': player['value_score'] + }) + + comparison_df = pd.DataFrame(comparison_data) + + col1, col2 = st.columns([2, 1]) + + with col1: + # Comparison table + st.dataframe( + comparison_df.style.format({ + 'Est. 
Salary': '${:.1f}M', + 'Overall': '{:.0f}', + 'Offense': '{:.0f}', + 'Defense': '{:.0f}', + 'Potential': '{:.0f}', + 'Value Score': '{:.2f}' + }).background_gradient(subset=['Value Score'], cmap='RdYlGn'), + use_container_width=True + ) + + with col2: + # Radar chart comparison + fig_radar = go.Figure() + + for _, player in comparison_df.iterrows(): + fig_radar.add_trace(go.Scatterpolar( + r=[player['Overall'], player['Offense'], player['Defense'], player['Potential']], + theta=['Overall', 'Offense', 'Defense', 'Potential'], + fill='toself', + name=player['Player'] + )) + + fig_radar.update_layout( + polar=dict( + radialaxis=dict(visible=True, range=[0, 100]) + ), + title="Player Comparison Radar" + ) + st.plotly_chart(fig_radar, use_container_width=True) + + with tab4: + st.header("Contract Signing Strategy") + + # Budget allocation + st.subheader("Budget Allocation") + + col1, col2 = st.columns(2) + + with col1: + st.info(f""" + **Current Financial Status** + - Salary Cap: ${SALARY_CAP:.1f}M + - Current Payroll: ${CURRENT_PAYROLL:.1f}M + - Available Cap Space: ${available_cap:.1f}M + - Room to Luxury Tax: ${luxury_room:.1f}M + """) + + with col2: + # Contract simulator + st.subheader("Contract Simulator") + + if player_options: + selected_player = st.selectbox( + "Player to Sign:", + options=list(player_options.keys()) + ) + + if selected_player: + player = player_options[selected_player] + + offered_salary = st.slider( + "Offer Amount ($M)", + min_value=1.0, + max_value=min(50.0, available_cap), + value=float(player['estimated_salary']), + step=0.5 + ) + + contract_years = st.slider("Contract Years", 1, 5, 3) + + total_value = offered_salary * contract_years + + st.success(f""" + **Contract Offer** + - Player: {player['first_name']} {player['last_name']} + - Annual: ${offered_salary:.1f}M + - Total: ${total_value:.1f}M + - Cap Impact: ${offered_salary:.1f}M + """) + + # Fit analysis + if offered_salary <= available_cap: + remaining_cap = available_cap - 
offered_salary + st.success(f"Fits in cap space. Remaining: ${remaining_cap:.1f}M") + else: + st.error(f"Exceeds cap by ${offered_salary - available_cap:.1f}M") + + # Signing recommendations + st.subheader("Strategic Recommendations") + + rec_col1, rec_col2, rec_col3 = st.columns(3) + + with rec_col1: + # High value targets + high_value_players = df[df['value_score'] >= 4].nlargest(3, 'overall_rating') + + st.success("**High Value Targets**") + for _, player in high_value_players.iterrows(): + st.write(f"• {player['first_name']} {player['last_name']} - {player['value_score']:.2f}") + + with rec_col2: + # Position needs + affordable_players = df[df['estimated_salary'] <= available_cap] + position_counts = affordable_players['position'].value_counts() if 'position' in affordable_players.columns else pd.Series() + + st.info("**Available by Position**") + for pos, count in position_counts.head(5).items(): + st.write(f"• {pos}: {count} players") + + with rec_col3: + # Budget strategy + st.warning("**Budget Strategy**") + st.write(f"• Max signing: ${available_cap:.1f}M") + st.write(f"• Multiple targets: 2-3 players") + st.write(f"• Reserve: ${max(5, available_cap * 0.2):.1f}M") + else: + st.warning("No rating data available for contract analysis.") + +else: + st.info("Click 'Load Free Agent Data' to begin contract efficiency analysis.") + + # Show expected data structure + st.subheader("Expected Data Structure") + st.code(""" + Contract analysis requires: + - Player identification (first_name, last_name, position) + - Performance ratings (overall_rating, offensive_rating, defensive_rating) + - Potential rating for future value assessment + - Market value estimation based on performance + """) + +# Footer with key insights +st.markdown("---") +st.caption(f"Last updated: {datetime.now().strftime('%Y-%m-%d %H:%M')} | Available Cap Space: ${available_cap:.1f}M") \ No newline at end of file diff --git a/app/src/pages/99_About.py b/app/src/pages/99_About.py new file mode 
100644 index 0000000000..e5c5614a5d --- /dev/null +++ b/app/src/pages/99_About.py @@ -0,0 +1,38 @@ +import streamlit as st +from streamlit_extras.app_logo import add_logo +from modules.nav import SideBarLinks + +SideBarLinks(show_home=True) + +st.write("# About this App") + +st.markdown ( + """ + This is our final project app for CS3200. + + BallWatch transforms NBA analytics into actionable insights for teams and fans. Our platform consolidates game statistics, player performance, and team dynamics into intuitive dashboards. Core features include real-time player stats tracking, lineup optimization analysis, and opponent scouting reports. We serve four key users: superfans seeking deeper insights, data engineers maintaining data integrity, coaches making strategic decisions, and GMs evaluating roster moves. By simplifying complex analytics, BallWatch bridges the gap between raw data and winning decisions. + + Please see our developers below: + """ + ) + +st.write("## Meet the Team") + +col1, col2, col3 = st.columns(3) + +with col1: + st.image("assets/wes.jpeg", caption="Wesley Chapman", width=200, use_container_width=True) + +with col2: + st.image("assets/drew.jpg", caption="Andrew Dubanowitz", width=200, use_container_width=True) + +with col3: + st.image("assets/vince.jpeg", caption="Vincent Schacknies", width=200, use_container_width=True) + +col4, col5 = st.columns(2) + +with col4: + st.image("assets/dishant.jpeg", caption="Dishant Budhi", width=200, use_container_width=True) + +with col5: + st.image("assets/frank.jpg", caption="Frank Healey", width=200, use_container_width=True) \ No newline at end of file diff --git a/database-files/00_northwind.sql b/database-files/00_northwind.sql deleted file mode 100644 index 57678cfc72..0000000000 --- a/database-files/00_northwind.sql +++ /dev/null @@ -1,546 +0,0 @@ -SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; -SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; -SET 
@OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; - -DROP SCHEMA IF EXISTS `northwind` ; -CREATE SCHEMA IF NOT EXISTS `northwind` DEFAULT CHARACTER SET latin1 ; -USE `northwind` ; - --- ----------------------------------------------------- --- Table `northwind`.`customers` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`customers` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`employees` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`employees` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL 
DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`privileges` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`privileges` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `privilege_name` VARCHAR(50) NULL DEFAULT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`employee_privileges` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`employee_privileges` ( - `employee_id` INT(11) NOT NULL, - `privilege_id` INT(11) NOT NULL, - PRIMARY KEY (`employee_id`, `privilege_id`), - INDEX `employee_id` (`employee_id` ASC), - INDEX `privilege_id` (`privilege_id` ASC), - INDEX `privilege_id_2` (`privilege_id` ASC), - CONSTRAINT `fk_employee_privileges_employees1` - FOREIGN KEY (`employee_id`) - REFERENCES `northwind`.`employees` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_employee_privileges_privileges1` - FOREIGN KEY (`privilege_id`) - REFERENCES 
`northwind`.`privileges` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`inventory_transaction_types` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`inventory_transaction_types` ( - `id` TINYINT(4) NOT NULL, - `type_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`shippers` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`shippers` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`orders_tax_status` --- ----------------------------------------------------- 
-CREATE TABLE IF NOT EXISTS `northwind`.`orders_tax_status` ( - `id` TINYINT(4) NOT NULL, - `tax_status_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`orders_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`orders_status` ( - `id` TINYINT(4) NOT NULL, - `status_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`orders` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`orders` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `employee_id` INT(11) NULL DEFAULT NULL, - `customer_id` INT(11) NULL DEFAULT NULL, - `order_date` DATETIME NULL DEFAULT NULL, - `shipped_date` DATETIME NULL DEFAULT NULL, - `shipper_id` INT(11) NULL DEFAULT NULL, - `ship_name` VARCHAR(50) NULL DEFAULT NULL, - `ship_address` LONGTEXT NULL DEFAULT NULL, - `ship_city` VARCHAR(50) NULL DEFAULT NULL, - `ship_state_province` VARCHAR(50) NULL DEFAULT NULL, - `ship_zip_postal_code` VARCHAR(50) NULL DEFAULT NULL, - `ship_country_region` VARCHAR(50) NULL DEFAULT NULL, - `shipping_fee` DECIMAL(19,4) NULL DEFAULT '0.0000', - `taxes` DECIMAL(19,4) NULL DEFAULT '0.0000', - `payment_type` VARCHAR(50) NULL DEFAULT NULL, - `paid_date` DATETIME NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `tax_rate` DOUBLE NULL DEFAULT '0', - `tax_status_id` TINYINT(4) NULL DEFAULT NULL, - `status_id` TINYINT(4) NULL DEFAULT '0', - PRIMARY KEY (`id`), - INDEX `customer_id` (`customer_id` ASC), - INDEX `customer_id_2` (`customer_id` ASC), - INDEX `employee_id` (`employee_id` ASC), - INDEX `employee_id_2` (`employee_id` ASC), - INDEX `id` (`id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `shipper_id` (`shipper_id` ASC), - INDEX `shipper_id_2` (`shipper_id` 
ASC), - INDEX `id_3` (`id` ASC), - INDEX `tax_status` (`tax_status_id` ASC), - INDEX `ship_zip_postal_code` (`ship_zip_postal_code` ASC), - CONSTRAINT `fk_orders_customers` - FOREIGN KEY (`customer_id`) - REFERENCES `northwind`.`customers` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_employees1` - FOREIGN KEY (`employee_id`) - REFERENCES `northwind`.`employees` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_shippers1` - FOREIGN KEY (`shipper_id`) - REFERENCES `northwind`.`shippers` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_orders_tax_status1` - FOREIGN KEY (`tax_status_id`) - REFERENCES `northwind`.`orders_tax_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_orders_status1` - FOREIGN KEY (`status_id`) - REFERENCES `northwind`.`orders_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`products` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`products` ( - `supplier_ids` LONGTEXT NULL DEFAULT NULL, - `id` INT(11) NOT NULL AUTO_INCREMENT, - `product_code` VARCHAR(25) NULL DEFAULT NULL, - `product_name` VARCHAR(50) NULL DEFAULT NULL, - `description` LONGTEXT NULL DEFAULT NULL, - `standard_cost` DECIMAL(19,4) NULL DEFAULT '0.0000', - `list_price` DECIMAL(19,4) NOT NULL DEFAULT '0.0000', - `reorder_level` INT(11) NULL DEFAULT NULL, - `target_level` INT(11) NULL DEFAULT NULL, - `quantity_per_unit` VARCHAR(50) NULL DEFAULT NULL, - `discontinued` TINYINT(1) NOT NULL DEFAULT '0', - `minimum_reorder_quantity` INT(11) NULL DEFAULT NULL, - `category` VARCHAR(50) NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `product_code` (`product_code` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- 
----------------------------------------------------- --- Table `northwind`.`purchase_order_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`purchase_order_status` ( - `id` INT(11) NOT NULL, - `status` VARCHAR(50) NULL DEFAULT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`suppliers` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`suppliers` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`purchase_orders` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`purchase_orders` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `supplier_id` INT(11) NULL DEFAULT NULL, - 
`created_by` INT(11) NULL DEFAULT NULL, - `submitted_date` DATETIME NULL DEFAULT NULL, - `creation_date` DATETIME NULL DEFAULT NULL, - `status_id` INT(11) NULL DEFAULT '0', - `expected_date` DATETIME NULL DEFAULT NULL, - `shipping_fee` DECIMAL(19,4) NOT NULL DEFAULT '0.0000', - `taxes` DECIMAL(19,4) NOT NULL DEFAULT '0.0000', - `payment_date` DATETIME NULL DEFAULT NULL, - `payment_amount` DECIMAL(19,4) NULL DEFAULT '0.0000', - `payment_method` VARCHAR(50) NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `approved_by` INT(11) NULL DEFAULT NULL, - `approved_date` DATETIME NULL DEFAULT NULL, - `submitted_by` INT(11) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE INDEX `id` (`id` ASC), - INDEX `created_by` (`created_by` ASC), - INDEX `status_id` (`status_id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `supplier_id` (`supplier_id` ASC), - INDEX `supplier_id_2` (`supplier_id` ASC), - CONSTRAINT `fk_purchase_orders_employees1` - FOREIGN KEY (`created_by`) - REFERENCES `northwind`.`employees` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_orders_purchase_order_status1` - FOREIGN KEY (`status_id`) - REFERENCES `northwind`.`purchase_order_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_orders_suppliers1` - FOREIGN KEY (`supplier_id`) - REFERENCES `northwind`.`suppliers` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`inventory_transactions` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`inventory_transactions` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `transaction_type` TINYINT(4) NOT NULL, - `transaction_created_date` DATETIME NULL DEFAULT NULL, - `transaction_modified_date` DATETIME NULL DEFAULT NULL, - `product_id` INT(11) NOT NULL, - `quantity` INT(11) NOT NULL, - `purchase_order_id` INT(11) NULL DEFAULT 
NULL, - `customer_order_id` INT(11) NULL DEFAULT NULL, - `comments` VARCHAR(255) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `customer_order_id` (`customer_order_id` ASC), - INDEX `customer_order_id_2` (`customer_order_id` ASC), - INDEX `product_id` (`product_id` ASC), - INDEX `product_id_2` (`product_id` ASC), - INDEX `purchase_order_id` (`purchase_order_id` ASC), - INDEX `purchase_order_id_2` (`purchase_order_id` ASC), - INDEX `transaction_type` (`transaction_type` ASC), - CONSTRAINT `fk_inventory_transactions_orders1` - FOREIGN KEY (`customer_order_id`) - REFERENCES `northwind`.`orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_inventory_transactions_products1` - FOREIGN KEY (`product_id`) - REFERENCES `northwind`.`products` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_inventory_transactions_purchase_orders1` - FOREIGN KEY (`purchase_order_id`) - REFERENCES `northwind`.`purchase_orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_inventory_transactions_inventory_transaction_types1` - FOREIGN KEY (`transaction_type`) - REFERENCES `northwind`.`inventory_transaction_types` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`invoices` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`invoices` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `order_id` INT(11) NULL DEFAULT NULL, - `invoice_date` DATETIME NULL DEFAULT NULL, - `due_date` DATETIME NULL DEFAULT NULL, - `tax` DECIMAL(19,4) NULL DEFAULT '0.0000', - `shipping` DECIMAL(19,4) NULL DEFAULT '0.0000', - `amount_due` DECIMAL(19,4) NULL DEFAULT '0.0000', - PRIMARY KEY (`id`), - INDEX `id` (`id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `fk_invoices_orders1_idx` (`order_id` ASC), - CONSTRAINT `fk_invoices_orders1` - FOREIGN KEY (`order_id`) - REFERENCES 
`northwind`.`orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`order_details_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`order_details_status` ( - `id` INT(11) NOT NULL, - `status_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`order_details` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`order_details` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `order_id` INT(11) NOT NULL, - `product_id` INT(11) NULL DEFAULT NULL, - `quantity` DECIMAL(18,4) NOT NULL DEFAULT '0.0000', - `unit_price` DECIMAL(19,4) NULL DEFAULT '0.0000', - `discount` DOUBLE NOT NULL DEFAULT '0', - `status_id` INT(11) NULL DEFAULT NULL, - `date_allocated` DATETIME NULL DEFAULT NULL, - `purchase_order_id` INT(11) NULL DEFAULT NULL, - `inventory_id` INT(11) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `id` (`id` ASC), - INDEX `inventory_id` (`inventory_id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `id_3` (`id` ASC), - INDEX `id_4` (`id` ASC), - INDEX `product_id` (`product_id` ASC), - INDEX `product_id_2` (`product_id` ASC), - INDEX `purchase_order_id` (`purchase_order_id` ASC), - INDEX `id_5` (`id` ASC), - INDEX `fk_order_details_orders1_idx` (`order_id` ASC), - INDEX `fk_order_details_order_details_status1_idx` (`status_id` ASC), - CONSTRAINT `fk_order_details_orders1` - FOREIGN KEY (`order_id`) - REFERENCES `northwind`.`orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_order_details_products1` - FOREIGN KEY (`product_id`) - REFERENCES `northwind`.`products` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_order_details_order_details_status1` - FOREIGN KEY 
(`status_id`) - REFERENCES `northwind`.`order_details_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`purchase_order_details` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`purchase_order_details` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `purchase_order_id` INT(11) NOT NULL, - `product_id` INT(11) NULL DEFAULT NULL, - `quantity` DECIMAL(18,4) NOT NULL, - `unit_cost` DECIMAL(19,4) NOT NULL, - `date_received` DATETIME NULL DEFAULT NULL, - `posted_to_inventory` TINYINT(1) NOT NULL DEFAULT '0', - `inventory_id` INT(11) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `id` (`id` ASC), - INDEX `inventory_id` (`inventory_id` ASC), - INDEX `inventory_id_2` (`inventory_id` ASC), - INDEX `purchase_order_id` (`purchase_order_id` ASC), - INDEX `product_id` (`product_id` ASC), - INDEX `product_id_2` (`product_id` ASC), - INDEX `purchase_order_id_2` (`purchase_order_id` ASC), - CONSTRAINT `fk_purchase_order_details_inventory_transactions1` - FOREIGN KEY (`inventory_id`) - REFERENCES `northwind`.`inventory_transactions` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_order_details_products1` - FOREIGN KEY (`product_id`) - REFERENCES `northwind`.`products` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_order_details_purchase_orders1` - FOREIGN KEY (`purchase_order_id`) - REFERENCES `northwind`.`purchase_orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`sales_reports` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`sales_reports` ( - `group_by` VARCHAR(50) NOT NULL, - `display` VARCHAR(50) NULL DEFAULT NULL, - `title` VARCHAR(50) NULL 
DEFAULT NULL, - `filter_row_source` LONGTEXT NULL DEFAULT NULL, - `default` TINYINT(1) NOT NULL DEFAULT '0', - PRIMARY KEY (`group_by`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`strings` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`strings` ( - `string_id` INT(11) NOT NULL AUTO_INCREMENT, - `string_data` VARCHAR(255) NULL DEFAULT NULL, - PRIMARY KEY (`string_id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - -SET SQL_MODE=@OLD_SQL_MODE; -SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; -SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; diff --git a/database-files/01_northwind-default-current-timestamp.sql b/database-files/01_northwind-default-current-timestamp.sql deleted file mode 100644 index 5596e4759c..0000000000 --- a/database-files/01_northwind-default-current-timestamp.sql +++ /dev/null @@ -1,546 +0,0 @@ -SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; -SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; -SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; - -DROP SCHEMA IF EXISTS `northwind` ; -CREATE SCHEMA IF NOT EXISTS `northwind` DEFAULT CHARACTER SET latin1 ; -USE `northwind` ; - --- ----------------------------------------------------- --- Table `northwind`.`customers` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`customers` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT 
NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`employees` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`employees` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- 
----------------------------------------------------- --- Table `northwind`.`privileges` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`privileges` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `privilege_name` VARCHAR(50) NULL DEFAULT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`employee_privileges` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`employee_privileges` ( - `employee_id` INT(11) NOT NULL, - `privilege_id` INT(11) NOT NULL, - PRIMARY KEY (`employee_id`, `privilege_id`), - INDEX `employee_id` (`employee_id` ASC), - INDEX `privilege_id` (`privilege_id` ASC), - INDEX `privilege_id_2` (`privilege_id` ASC), - CONSTRAINT `fk_employee_privileges_employees1` - FOREIGN KEY (`employee_id`) - REFERENCES `northwind`.`employees` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_employee_privileges_privileges1` - FOREIGN KEY (`privilege_id`) - REFERENCES `northwind`.`privileges` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`inventory_transaction_types` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`inventory_transaction_types` ( - `id` TINYINT(4) NOT NULL, - `type_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`shippers` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`shippers` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - 
`email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`orders_tax_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`orders_tax_status` ( - `id` TINYINT(4) NOT NULL, - `tax_status_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`orders_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`orders_status` ( - `id` TINYINT(4) NOT NULL, - `status_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`orders` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`orders` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `employee_id` INT(11) NULL DEFAULT NULL, - `customer_id` INT(11) NULL DEFAULT NULL, - 
`order_date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - `shipped_date` DATETIME NULL DEFAULT NULL, - `shipper_id` INT(11) NULL DEFAULT NULL, - `ship_name` VARCHAR(50) NULL DEFAULT NULL, - `ship_address` LONGTEXT NULL DEFAULT NULL, - `ship_city` VARCHAR(50) NULL DEFAULT NULL, - `ship_state_province` VARCHAR(50) NULL DEFAULT NULL, - `ship_zip_postal_code` VARCHAR(50) NULL DEFAULT NULL, - `ship_country_region` VARCHAR(50) NULL DEFAULT NULL, - `shipping_fee` DECIMAL(19,4) NULL DEFAULT '0.0000', - `taxes` DECIMAL(19,4) NULL DEFAULT '0.0000', - `payment_type` VARCHAR(50) NULL DEFAULT NULL, - `paid_date` DATETIME NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `tax_rate` DOUBLE NULL DEFAULT '0', - `tax_status_id` TINYINT(4) NULL DEFAULT NULL, - `status_id` TINYINT(4) NULL DEFAULT '0', - PRIMARY KEY (`id`), - INDEX `customer_id` (`customer_id` ASC), - INDEX `customer_id_2` (`customer_id` ASC), - INDEX `employee_id` (`employee_id` ASC), - INDEX `employee_id_2` (`employee_id` ASC), - INDEX `id` (`id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `shipper_id` (`shipper_id` ASC), - INDEX `shipper_id_2` (`shipper_id` ASC), - INDEX `id_3` (`id` ASC), - INDEX `tax_status` (`tax_status_id` ASC), - INDEX `ship_zip_postal_code` (`ship_zip_postal_code` ASC), - CONSTRAINT `fk_orders_customers` - FOREIGN KEY (`customer_id`) - REFERENCES `northwind`.`customers` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_employees1` - FOREIGN KEY (`employee_id`) - REFERENCES `northwind`.`employees` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_shippers1` - FOREIGN KEY (`shipper_id`) - REFERENCES `northwind`.`shippers` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_orders_tax_status1` - FOREIGN KEY (`tax_status_id`) - REFERENCES `northwind`.`orders_tax_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_orders_orders_status1` - FOREIGN KEY (`status_id`) - REFERENCES 
`northwind`.`orders_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`products` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`products` ( - `supplier_ids` LONGTEXT NULL DEFAULT NULL, - `id` INT(11) NOT NULL AUTO_INCREMENT, - `product_code` VARCHAR(25) NULL DEFAULT NULL, - `product_name` VARCHAR(50) NULL DEFAULT NULL, - `description` LONGTEXT NULL DEFAULT NULL, - `standard_cost` DECIMAL(19,4) NULL DEFAULT '0.0000', - `list_price` DECIMAL(19,4) NOT NULL DEFAULT '0.0000', - `reorder_level` INT(11) NULL DEFAULT NULL, - `target_level` INT(11) NULL DEFAULT NULL, - `quantity_per_unit` VARCHAR(50) NULL DEFAULT NULL, - `discontinued` TINYINT(1) NOT NULL DEFAULT '0', - `minimum_reorder_quantity` INT(11) NULL DEFAULT NULL, - `category` VARCHAR(50) NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `product_code` (`product_code` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`purchase_order_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`purchase_order_status` ( - `id` INT(11) NOT NULL, - `status` VARCHAR(50) NULL DEFAULT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`suppliers` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`suppliers` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `company` VARCHAR(50) NULL DEFAULT NULL, - `last_name` VARCHAR(50) NULL DEFAULT NULL, - `first_name` VARCHAR(50) NULL DEFAULT NULL, - `email_address` VARCHAR(50) NULL DEFAULT NULL, - `job_title` VARCHAR(50) NULL DEFAULT NULL, - `business_phone` VARCHAR(25) 
NULL DEFAULT NULL, - `home_phone` VARCHAR(25) NULL DEFAULT NULL, - `mobile_phone` VARCHAR(25) NULL DEFAULT NULL, - `fax_number` VARCHAR(25) NULL DEFAULT NULL, - `address` LONGTEXT NULL DEFAULT NULL, - `city` VARCHAR(50) NULL DEFAULT NULL, - `state_province` VARCHAR(50) NULL DEFAULT NULL, - `zip_postal_code` VARCHAR(15) NULL DEFAULT NULL, - `country_region` VARCHAR(50) NULL DEFAULT NULL, - `web_page` LONGTEXT NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `attachments` LONGBLOB NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `city` (`city` ASC), - INDEX `company` (`company` ASC), - INDEX `first_name` (`first_name` ASC), - INDEX `last_name` (`last_name` ASC), - INDEX `zip_postal_code` (`zip_postal_code` ASC), - INDEX `state_province` (`state_province` ASC)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`purchase_orders` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`purchase_orders` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `supplier_id` INT(11) NULL DEFAULT NULL, - `created_by` INT(11) NULL DEFAULT NULL, - `submitted_date` DATETIME NULL DEFAULT NULL, - `creation_date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - `status_id` INT(11) NULL DEFAULT '0', - `expected_date` DATETIME NULL DEFAULT NULL, - `shipping_fee` DECIMAL(19,4) NOT NULL DEFAULT '0.0000', - `taxes` DECIMAL(19,4) NOT NULL DEFAULT '0.0000', - `payment_date` DATETIME NULL DEFAULT NULL, - `payment_amount` DECIMAL(19,4) NULL DEFAULT '0.0000', - `payment_method` VARCHAR(50) NULL DEFAULT NULL, - `notes` LONGTEXT NULL DEFAULT NULL, - `approved_by` INT(11) NULL DEFAULT NULL, - `approved_date` DATETIME NULL DEFAULT NULL, - `submitted_by` INT(11) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE INDEX `id` (`id` ASC), - INDEX `created_by` (`created_by` ASC), - INDEX `status_id` (`status_id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `supplier_id` (`supplier_id` 
ASC), - INDEX `supplier_id_2` (`supplier_id` ASC), - CONSTRAINT `fk_purchase_orders_employees1` - FOREIGN KEY (`created_by`) - REFERENCES `northwind`.`employees` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_orders_purchase_order_status1` - FOREIGN KEY (`status_id`) - REFERENCES `northwind`.`purchase_order_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_orders_suppliers1` - FOREIGN KEY (`supplier_id`) - REFERENCES `northwind`.`suppliers` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`inventory_transactions` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`inventory_transactions` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `transaction_type` TINYINT(4) NOT NULL, - `transaction_created_date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - `transaction_modified_date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - `product_id` INT(11) NOT NULL, - `quantity` INT(11) NOT NULL, - `purchase_order_id` INT(11) NULL DEFAULT NULL, - `customer_order_id` INT(11) NULL DEFAULT NULL, - `comments` VARCHAR(255) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `customer_order_id` (`customer_order_id` ASC), - INDEX `customer_order_id_2` (`customer_order_id` ASC), - INDEX `product_id` (`product_id` ASC), - INDEX `product_id_2` (`product_id` ASC), - INDEX `purchase_order_id` (`purchase_order_id` ASC), - INDEX `purchase_order_id_2` (`purchase_order_id` ASC), - INDEX `transaction_type` (`transaction_type` ASC), - CONSTRAINT `fk_inventory_transactions_orders1` - FOREIGN KEY (`customer_order_id`) - REFERENCES `northwind`.`orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_inventory_transactions_products1` - FOREIGN KEY (`product_id`) - REFERENCES `northwind`.`products` (`id`) - ON DELETE NO ACTION - ON UPDATE NO 
ACTION, - CONSTRAINT `fk_inventory_transactions_purchase_orders1` - FOREIGN KEY (`purchase_order_id`) - REFERENCES `northwind`.`purchase_orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_inventory_transactions_inventory_transaction_types1` - FOREIGN KEY (`transaction_type`) - REFERENCES `northwind`.`inventory_transaction_types` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`invoices` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`invoices` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `order_id` INT(11) NULL DEFAULT NULL, - `invoice_date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - `due_date` DATETIME NULL DEFAULT NULL, - `tax` DECIMAL(19,4) NULL DEFAULT '0.0000', - `shipping` DECIMAL(19,4) NULL DEFAULT '0.0000', - `amount_due` DECIMAL(19,4) NULL DEFAULT '0.0000', - PRIMARY KEY (`id`), - INDEX `id` (`id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `fk_invoices_orders1_idx` (`order_id` ASC), - CONSTRAINT `fk_invoices_orders1` - FOREIGN KEY (`order_id`) - REFERENCES `northwind`.`orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`order_details_status` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`order_details_status` ( - `id` INT(11) NOT NULL, - `status_name` VARCHAR(50) NOT NULL, - PRIMARY KEY (`id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`order_details` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`order_details` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `order_id` INT(11) NOT NULL, - `product_id` INT(11) NULL 
DEFAULT NULL, - `quantity` DECIMAL(18,4) NOT NULL DEFAULT '0.0000', - `unit_price` DECIMAL(19,4) NULL DEFAULT '0.0000', - `discount` DOUBLE NOT NULL DEFAULT '0', - `status_id` INT(11) NULL DEFAULT NULL, - `date_allocated` DATETIME NULL DEFAULT NULL, - `purchase_order_id` INT(11) NULL DEFAULT NULL, - `inventory_id` INT(11) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `id` (`id` ASC), - INDEX `inventory_id` (`inventory_id` ASC), - INDEX `id_2` (`id` ASC), - INDEX `id_3` (`id` ASC), - INDEX `id_4` (`id` ASC), - INDEX `product_id` (`product_id` ASC), - INDEX `product_id_2` (`product_id` ASC), - INDEX `purchase_order_id` (`purchase_order_id` ASC), - INDEX `id_5` (`id` ASC), - INDEX `fk_order_details_orders1_idx` (`order_id` ASC), - INDEX `fk_order_details_order_details_status1_idx` (`status_id` ASC), - CONSTRAINT `fk_order_details_orders1` - FOREIGN KEY (`order_id`) - REFERENCES `northwind`.`orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_order_details_products1` - FOREIGN KEY (`product_id`) - REFERENCES `northwind`.`products` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_order_details_order_details_status1` - FOREIGN KEY (`status_id`) - REFERENCES `northwind`.`order_details_status` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`purchase_order_details` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`purchase_order_details` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `purchase_order_id` INT(11) NOT NULL, - `product_id` INT(11) NULL DEFAULT NULL, - `quantity` DECIMAL(18,4) NOT NULL, - `unit_cost` DECIMAL(19,4) NOT NULL, - `date_received` DATETIME NULL DEFAULT NULL, - `posted_to_inventory` TINYINT(1) NOT NULL DEFAULT '0', - `inventory_id` INT(11) NULL DEFAULT NULL, - PRIMARY KEY (`id`), - INDEX `id` (`id` ASC), - INDEX 
`inventory_id` (`inventory_id` ASC), - INDEX `inventory_id_2` (`inventory_id` ASC), - INDEX `purchase_order_id` (`purchase_order_id` ASC), - INDEX `product_id` (`product_id` ASC), - INDEX `product_id_2` (`product_id` ASC), - INDEX `purchase_order_id_2` (`purchase_order_id` ASC), - CONSTRAINT `fk_purchase_order_details_inventory_transactions1` - FOREIGN KEY (`inventory_id`) - REFERENCES `northwind`.`inventory_transactions` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_order_details_products1` - FOREIGN KEY (`product_id`) - REFERENCES `northwind`.`products` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION, - CONSTRAINT `fk_purchase_order_details_purchase_orders1` - FOREIGN KEY (`purchase_order_id`) - REFERENCES `northwind`.`purchase_orders` (`id`) - ON DELETE NO ACTION - ON UPDATE NO ACTION) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`sales_reports` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`sales_reports` ( - `group_by` VARCHAR(50) NOT NULL, - `display` VARCHAR(50) NULL DEFAULT NULL, - `title` VARCHAR(50) NULL DEFAULT NULL, - `filter_row_source` LONGTEXT NULL DEFAULT NULL, - `default` TINYINT(1) NOT NULL DEFAULT '0', - PRIMARY KEY (`group_by`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - --- ----------------------------------------------------- --- Table `northwind`.`strings` --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS `northwind`.`strings` ( - `string_id` INT(11) NOT NULL AUTO_INCREMENT, - `string_data` VARCHAR(255) NULL DEFAULT NULL, - PRIMARY KEY (`string_id`)) -ENGINE = InnoDB -DEFAULT CHARACTER SET = utf8; - - -SET SQL_MODE=@OLD_SQL_MODE; -SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; -SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; diff --git a/database-files/02_northwind-data.sql b/database-files/02_northwind-data.sql deleted file mode 100644 
index e4477299ac..0000000000 --- a/database-files/02_northwind-data.sql +++ /dev/null @@ -1,654 +0,0 @@ -# -# Converted from MS Access 2010 Northwind database (northwind.accdb) using -# Bullzip MS Access to MySQL Version 5.1.242. http://www.bullzip.com -# -# CHANGES MADE AFTER INITIAL CONVERSION -# * column and row names in CamelCase converted to lower_case_with_underscore -# * space and slash ("/") in table and column names replaced with _underscore_ -# * id column names converted to "id" -# * foreign key column names converted to xxx_id -# * variables of type TIMESTAMP converted to DATETIME to avoid TIMESTAMP -# range limitation (1997 - 2038 UTC), and other limitations. -# * unique and foreign key checks disabled while loading data -# -#------------------------------------------------------------------ -# - -SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; -SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; - -USE `northwind`; - -# -# Dumping data for table 'customers' -# - -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (1, 'Company A', 'Bedecs', 'Anna', NULL, 'Owner', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 1st Street', 'Seattle', 'WA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (2, 'Company B', 'Gratacos Solsona', 'Antonio', NULL, 'Owner', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 2nd Street', 'Boston', 'MA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, 
`email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (3, 'Company C', 'Axen', 'Thomas', NULL, 'Purchasing Representative', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 3rd Street', 'Los Angelas', 'CA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (4, 'Company D', 'Lee', 'Christina', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 4th Street', 'New York', 'NY', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (5, 'Company E', 'O’Donnell', 'Martin', NULL, 'Owner', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 5th Street', 'Minneapolis', 'MN', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (6, 'Company F', 'Pérez-Olaeta', 'Francisco', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 6th Street', 'Milwaukee', 'WI', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, 
`country_region`, `web_page`, `notes`, `attachments`) VALUES (7, 'Company G', 'Xie', 'Ming-Yang', NULL, 'Owner', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 7th Street', 'Boise', 'ID', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (8, 'Company H', 'Andersen', 'Elizabeth', NULL, 'Purchasing Representative', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 8th Street', 'Portland', 'OR', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (9, 'Company I', 'Mortensen', 'Sven', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 9th Street', 'Salt Lake City', 'UT', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (10, 'Company J', 'Wacker', 'Roland', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 10th Street', 'Chicago', 'IL', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (11, 'Company K', 'Krschne', 'Peter', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, 
'(123)555-0101', '123 11th Street', 'Miami', 'FL', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (12, 'Company L', 'Edwards', 'John', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '123 12th Street', 'Las Vegas', 'NV', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (13, 'Company M', 'Ludick', 'Andre', NULL, 'Purchasing Representative', '(123)555-0100', NULL, NULL, '(123)555-0101', '456 13th Street', 'Memphis', 'TN', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (14, 'Company N', 'Grilo', 'Carlos', NULL, 'Purchasing Representative', '(123)555-0100', NULL, NULL, '(123)555-0101', '456 14th Street', 'Denver', 'CO', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (15, 'Company O', 'Kupkova', 'Helena', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '456 15th Street', 'Honolulu', 'HI', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, 
`first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (16, 'Company P', 'Goldschmidt', 'Daniel', NULL, 'Purchasing Representative', '(123)555-0100', NULL, NULL, '(123)555-0101', '456 16th Street', 'San Francisco', 'CA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (17, 'Company Q', 'Bagel', 'Jean Philippe', NULL, 'Owner', '(123)555-0100', NULL, NULL, '(123)555-0101', '456 17th Street', 'Seattle', 'WA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (18, 'Company R', 'Autier Miconi', 'Catherine', NULL, 'Purchasing Representative', '(123)555-0100', NULL, NULL, '(123)555-0101', '456 18th Street', 'Boston', 'MA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (19, 'Company S', 'Eggerer', 'Alexander', NULL, 'Accounting Assistant', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 19th Street', 'Los Angelas', 'CA', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, 
`city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (20, 'Company T', 'Li', 'George', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 20th Street', 'New York', 'NY', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (21, 'Company U', 'Tham', 'Bernard', NULL, 'Accounting Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 21th Street', 'Minneapolis', 'MN', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (22, 'Company V', 'Ramos', 'Luciana', NULL, 'Purchasing Assistant', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 22th Street', 'Milwaukee', 'WI', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (23, 'Company W', 'Entin', 'Michael', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 23th Street', 'Portland', 'OR', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (24, 'Company X', 'Hasselberg', 'Jonas', NULL, 
'Owner', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 24th Street', 'Salt Lake City', 'UT', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (25, 'Company Y', 'Rodman', 'John', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 25th Street', 'Chicago', 'IL', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (26, 'Company Z', 'Liu', 'Run', NULL, 'Accounting Assistant', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 26th Street', 'Miami', 'FL', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (27, 'Company AA', 'Toh', 'Karen', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 27th Street', 'Las Vegas', 'NV', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (28, 'Company BB', 'Raghav', 'Amritansh', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 28th Street', 'Memphis', 'TN', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `customers` (`id`, 
`company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (29, 'Company CC', 'Lee', 'Soo Jung', NULL, 'Purchasing Manager', '(123)555-0100', NULL, NULL, '(123)555-0101', '789 29th Street', 'Denver', 'CO', '99999', 'USA', NULL, NULL, ''); -# 29 records - -# -# Dumping data for table 'employee_privileges' -# - -INSERT INTO `employee_privileges` (`employee_id`, `privilege_id`) VALUES (2, 2); -# 1 records - -# -# Dumping data for table 'employees' -# - -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (1, 'Northwind Traders', 'Freehafer', 'Nancy', 'nancy@northwindtraders.com', 'Sales Representative', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 1st Avenue', 'Seattle', 'WA', '99999', 'USA', '#http://northwindtraders.com#', NULL, ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (2, 'Northwind Traders', 'Cencini', 'Andrew', 'andrew@northwindtraders.com', 'Vice President, Sales', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 2nd Avenue', 'Bellevue', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', 'Joined the company as a sales representative, was promoted to sales manager and was then named vice president of sales.', ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, 
`mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (3, 'Northwind Traders', 'Kotas', 'Jan', 'jan@northwindtraders.com', 'Sales Representative', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 3rd Avenue', 'Redmond', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', 'Was hired as a sales associate and was promoted to sales representative.', ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (4, 'Northwind Traders', 'Sergienko', 'Mariya', 'mariya@northwindtraders.com', 'Sales Representative', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 4th Avenue', 'Kirkland', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', NULL, ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (5, 'Northwind Traders', 'Thorpe', 'Steven', 'steven@northwindtraders.com', 'Sales Manager', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 5th Avenue', 'Seattle', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', 'Joined the company as a sales representative and was promoted to sales manager. 
Fluent in French.', ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (6, 'Northwind Traders', 'Neipper', 'Michael', 'michael@northwindtraders.com', 'Sales Representative', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 6th Avenue', 'Redmond', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', 'Fluent in Japanese and can read and write French, Portuguese, and Spanish.', ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (7, 'Northwind Traders', 'Zare', 'Robert', 'robert@northwindtraders.com', 'Sales Representative', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 7th Avenue', 'Seattle', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', NULL, ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (8, 'Northwind Traders', 'Giussani', 'Laura', 'laura@northwindtraders.com', 'Sales Coordinator', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 8th Avenue', 'Redmond', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', 'Reads and writes French.', ''); -INSERT INTO `employees` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, 
`zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (9, 'Northwind Traders', 'Hellung-Larsen', 'Anne', 'anne@northwindtraders.com', 'Sales Representative', '(123)555-0100', '(123)555-0102', NULL, '(123)555-0103', '123 9th Avenue', 'Seattle', 'WA', '99999', 'USA', 'http://northwindtraders.com#http://northwindtraders.com/#', 'Fluent in French and German.', ''); -# 9 records - -# -# Dumping data for table 'inventory_transaction_types' -# - -INSERT INTO `inventory_transaction_types` (`id`, `type_name`) VALUES (1, 'Purchased'); -INSERT INTO `inventory_transaction_types` (`id`, `type_name`) VALUES (2, 'Sold'); -INSERT INTO `inventory_transaction_types` (`id`, `type_name`) VALUES (3, 'On Hold'); -INSERT INTO `inventory_transaction_types` (`id`, `type_name`) VALUES (4, 'Waste'); -# 4 records - -# -# Dumping data for table 'inventory_transactions' -# - -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (35, 1, '2006-03-22 16:02:28', '2006-03-22 16:02:28', 80, 75, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (36, 1, '2006-03-22 16:02:48', '2006-03-22 16:02:48', 72, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (37, 1, '2006-03-22 16:03:04', '2006-03-22 16:03:04', 52, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (38, 1, '2006-03-22 16:03:09', 
'2006-03-22 16:03:09', 56, 120, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (39, 1, '2006-03-22 16:03:14', '2006-03-22 16:03:14', 57, 80, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (40, 1, '2006-03-22 16:03:40', '2006-03-22 16:03:40', 6, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (41, 1, '2006-03-22 16:03:47', '2006-03-22 16:03:47', 7, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (42, 1, '2006-03-22 16:03:54', '2006-03-22 16:03:54', 8, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (43, 1, '2006-03-22 16:04:02', '2006-03-22 16:04:02', 14, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (44, 1, '2006-03-22 16:04:07', '2006-03-22 16:04:07', 17, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES 
(45, 1, '2006-03-22 16:04:12', '2006-03-22 16:04:12', 19, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (46, 1, '2006-03-22 16:04:17', '2006-03-22 16:04:17', 20, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (47, 1, '2006-03-22 16:04:20', '2006-03-22 16:04:20', 21, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (48, 1, '2006-03-22 16:04:24', '2006-03-22 16:04:24', 40, 120, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (49, 1, '2006-03-22 16:04:28', '2006-03-22 16:04:28', 41, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (50, 1, '2006-03-22 16:04:31', '2006-03-22 16:04:31', 48, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (51, 1, '2006-03-22 16:04:38', '2006-03-22 16:04:38', 51, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, 
`customer_order_id`, `comments`) VALUES (52, 1, '2006-03-22 16:04:41', '2006-03-22 16:04:41', 74, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (53, 1, '2006-03-22 16:04:45', '2006-03-22 16:04:45', 77, 60, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (54, 1, '2006-03-22 16:05:07', '2006-03-22 16:05:07', 3, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (55, 1, '2006-03-22 16:05:11', '2006-03-22 16:05:11', 4, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (56, 1, '2006-03-22 16:05:14', '2006-03-22 16:05:14', 5, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (57, 1, '2006-03-22 16:05:26', '2006-03-22 16:05:26', 65, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (58, 1, '2006-03-22 16:05:32', '2006-03-22 16:05:32', 66, 80, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, 
`quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (59, 1, '2006-03-22 16:05:47', '2006-03-22 16:05:47', 1, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (60, 1, '2006-03-22 16:05:51', '2006-03-22 16:05:51', 34, 60, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (61, 1, '2006-03-22 16:06:00', '2006-03-22 16:06:00', 43, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (62, 1, '2006-03-22 16:06:03', '2006-03-22 16:06:03', 81, 125, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (63, 2, '2006-03-22 16:07:56', '2006-03-24 11:03:00', 80, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (64, 2, '2006-03-22 16:08:19', '2006-03-22 16:08:59', 7, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (65, 2, '2006-03-22 16:08:29', '2006-03-22 16:08:59', 51, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, 
`transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (66, 2, '2006-03-22 16:08:37', '2006-03-22 16:08:59', 80, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (67, 2, '2006-03-22 16:09:46', '2006-03-22 16:10:27', 1, 15, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (68, 2, '2006-03-22 16:10:06', '2006-03-22 16:10:27', 43, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (69, 2, '2006-03-22 16:11:39', '2006-03-24 11:00:55', 19, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (70, 2, '2006-03-22 16:11:56', '2006-03-24 10:59:41', 48, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (71, 2, '2006-03-22 16:12:29', '2006-03-24 10:57:38', 8, 17, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (72, 1, '2006-03-24 10:41:30', '2006-03-24 10:41:30', 81, 200, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, 
`transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (73, 2, '2006-03-24 10:41:33', '2006-03-24 10:41:42', 81, 200, NULL, NULL, 'Fill Back Ordered product, Order #40'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (74, 1, '2006-03-24 10:53:13', '2006-03-24 10:53:13', 48, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (75, 2, '2006-03-24 10:53:16', '2006-03-24 10:55:46', 48, 100, NULL, NULL, 'Fill Back Ordered product, Order #39'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (76, 1, '2006-03-24 10:53:36', '2006-03-24 10:53:36', 43, 300, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (77, 2, '2006-03-24 10:53:39', '2006-03-24 10:56:57', 43, 300, NULL, NULL, 'Fill Back Ordered product, Order #38'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (78, 1, '2006-03-24 10:54:04', '2006-03-24 10:54:04', 41, 200, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, 
`comments`) VALUES (79, 2, '2006-03-24 10:54:07', '2006-03-24 10:58:40', 41, 200, NULL, NULL, 'Fill Back Ordered product, Order #36'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (80, 1, '2006-03-24 10:54:33', '2006-03-24 10:54:33', 19, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (81, 2, '2006-03-24 10:54:35', '2006-03-24 11:02:02', 19, 30, NULL, NULL, 'Fill Back Ordered product, Order #33'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (82, 1, '2006-03-24 10:54:58', '2006-03-24 10:54:58', 34, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (83, 2, '2006-03-24 10:55:02', '2006-03-24 11:03:00', 34, 100, NULL, NULL, 'Fill Back Ordered product, Order #30'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (84, 2, '2006-03-24 14:48:15', '2006-04-04 11:41:14', 6, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (85, 2, '2006-03-24 14:48:23', '2006-04-04 11:41:14', 4, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, 
`transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (86, 3, '2006-03-24 14:49:16', '2006-03-24 14:49:16', 80, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (87, 3, '2006-03-24 14:49:20', '2006-03-24 14:49:20', 81, 50, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (88, 3, '2006-03-24 14:50:09', '2006-03-24 14:50:09', 1, 25, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (89, 3, '2006-03-24 14:50:14', '2006-03-24 14:50:14', 43, 25, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (90, 3, '2006-03-24 14:50:18', '2006-03-24 14:50:18', 81, 25, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (91, 2, '2006-03-24 14:51:03', '2006-04-04 11:09:24', 40, 50, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (92, 2, '2006-03-24 14:55:03', '2006-04-04 11:06:56', 21, 20, NULL, NULL, NULL); -INSERT INTO 
`inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (93, 2, '2006-03-24 14:55:39', '2006-04-04 11:06:13', 5, 25, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (94, 2, '2006-03-24 14:55:52', '2006-04-04 11:06:13', 41, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (95, 2, '2006-03-24 14:56:09', '2006-04-04 11:06:13', 40, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (96, 3, '2006-03-30 16:46:34', '2006-03-30 16:46:34', 34, 12, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (97, 3, '2006-03-30 17:23:27', '2006-03-30 17:23:27', 34, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (98, 3, '2006-03-30 17:24:33', '2006-03-30 17:24:33', 34, 1, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (99, 2, '2006-04-03 13:50:08', '2006-04-03 13:50:15', 48, 10, 
NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (100, 1, '2006-04-04 11:00:54', '2006-04-04 11:00:54', 57, 100, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (101, 2, '2006-04-04 11:00:56', '2006-04-04 11:08:49', 57, 100, NULL, NULL, 'Fill Back Ordered product, Order #46'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (102, 1, '2006-04-04 11:01:14', '2006-04-04 11:01:14', 34, 50, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (103, 1, '2006-04-04 11:01:35', '2006-04-04 11:01:35', 43, 250, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (104, 3, '2006-04-04 11:01:37', '2006-04-04 11:01:37', 43, 300, NULL, NULL, 'Fill Back Ordered product, Order #41'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (105, 1, '2006-04-04 11:01:55', '2006-04-04 11:01:55', 8, 25, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, 
`purchase_order_id`, `customer_order_id`, `comments`) VALUES (106, 2, '2006-04-04 11:01:58', '2006-04-04 11:07:37', 8, 25, NULL, NULL, 'Fill Back Ordered product, Order #48'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (107, 1, '2006-04-04 11:02:17', '2006-04-04 11:02:17', 34, 300, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (108, 2, '2006-04-04 11:02:19', '2006-04-04 11:08:14', 34, 300, NULL, NULL, 'Fill Back Ordered product, Order #47'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (109, 1, '2006-04-04 11:02:37', '2006-04-04 11:02:37', 19, 25, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (110, 2, '2006-04-04 11:02:39', '2006-04-04 11:41:14', 19, 10, NULL, NULL, 'Fill Back Ordered product, Order #42'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (111, 1, '2006-04-04 11:02:56', '2006-04-04 11:02:56', 19, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (112, 2, '2006-04-04 11:02:58', '2006-04-04 11:07:37', 19, 25, NULL, NULL, 'Fill Back 
Ordered product, Order #48'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (113, 1, '2006-04-04 11:03:12', '2006-04-04 11:03:12', 72, 50, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (114, 2, '2006-04-04 11:03:14', '2006-04-04 11:08:49', 72, 50, NULL, NULL, 'Fill Back Ordered product, Order #46'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (115, 1, '2006-04-04 11:03:38', '2006-04-04 11:03:38', 41, 50, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (116, 2, '2006-04-04 11:03:39', '2006-04-04 11:09:24', 41, 50, NULL, NULL, 'Fill Back Ordered product, Order #45'); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (117, 2, '2006-04-04 11:04:55', '2006-04-04 11:05:04', 34, 87, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (118, 2, '2006-04-04 11:35:50', '2006-04-04 11:35:54', 51, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, 
`purchase_order_id`, `customer_order_id`, `comments`) VALUES (119, 2, '2006-04-04 11:35:51', '2006-04-04 11:35:54', 7, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (120, 2, '2006-04-04 11:36:15', '2006-04-04 11:36:21', 17, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (121, 2, '2006-04-04 11:36:39', '2006-04-04 11:36:47', 6, 90, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (122, 2, '2006-04-04 11:37:06', '2006-04-04 11:37:09', 4, 30, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (123, 2, '2006-04-04 11:37:45', '2006-04-04 11:37:49', 48, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (124, 2, '2006-04-04 11:38:07', '2006-04-04 11:38:11', 48, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (125, 2, '2006-04-04 11:38:27', '2006-04-04 11:38:32', 41, 10, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, 
`transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (126, 2, '2006-04-04 11:38:48', '2006-04-04 11:38:53', 43, 5, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (127, 2, '2006-04-04 11:39:12', '2006-04-04 11:39:29', 40, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (128, 2, '2006-04-04 11:39:50', '2006-04-04 11:39:53', 8, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (129, 2, '2006-04-04 11:40:13', '2006-04-04 11:40:16', 80, 15, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (130, 2, '2006-04-04 11:40:32', '2006-04-04 11:40:38', 74, 20, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (131, 2, '2006-04-04 11:41:39', '2006-04-04 11:41:45', 72, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (132, 2, '2006-04-04 11:42:17', '2006-04-04 11:42:26', 3, 50, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, 
`transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (133, 2, '2006-04-04 11:42:24', '2006-04-04 11:42:26', 8, 3, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (134, 2, '2006-04-04 11:42:48', '2006-04-04 11:43:08', 20, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (135, 2, '2006-04-04 11:43:05', '2006-04-04 11:43:08', 52, 40, NULL, NULL, NULL); -INSERT INTO `inventory_transactions` (`id`, `transaction_type`, `transaction_created_date`, `transaction_modified_date`, `product_id`, `quantity`, `purchase_order_id`, `customer_order_id`, `comments`) VALUES (136, 3, '2006-04-25 17:04:05', '2006-04-25 17:04:57', 56, 110, NULL, NULL, NULL); -# 102 records - -# -# Dumping data for table 'invoices' -# - -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (5, 31, '2006-03-22 16:08:59', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (6, 32, '2006-03-22 16:10:27', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (7, 40, '2006-03-24 10:41:41', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (8, 39, '2006-03-24 10:55:46', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (9, 38, '2006-03-24 10:56:57', NULL, 0, 0, 0); -INSERT INTO `invoices` 
(`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (10, 37, '2006-03-24 10:57:38', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (11, 36, '2006-03-24 10:58:40', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (12, 35, '2006-03-24 10:59:41', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (13, 34, '2006-03-24 11:00:55', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (14, 33, '2006-03-24 11:02:02', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (15, 30, '2006-03-24 11:03:00', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (16, 56, '2006-04-03 13:50:15', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (17, 55, '2006-04-04 11:05:04', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (18, 51, '2006-04-04 11:06:13', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (19, 50, '2006-04-04 11:06:56', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (20, 48, '2006-04-04 11:07:37', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (21, 47, '2006-04-04 11:08:14', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (22, 
46, '2006-04-04 11:08:49', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (23, 45, '2006-04-04 11:09:24', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (24, 79, '2006-04-04 11:35:54', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (25, 78, '2006-04-04 11:36:21', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (26, 77, '2006-04-04 11:36:47', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (27, 76, '2006-04-04 11:37:09', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (28, 75, '2006-04-04 11:37:49', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (29, 74, '2006-04-04 11:38:11', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (30, 73, '2006-04-04 11:38:32', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (31, 72, '2006-04-04 11:38:53', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (32, 71, '2006-04-04 11:39:29', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (33, 70, '2006-04-04 11:39:53', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (34, 69, '2006-04-04 11:40:16', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, 
`invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (35, 67, '2006-04-04 11:40:38', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (36, 42, '2006-04-04 11:41:14', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (37, 60, '2006-04-04 11:41:45', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (38, 63, '2006-04-04 11:42:26', NULL, 0, 0, 0); -INSERT INTO `invoices` (`id`, `order_id`, `invoice_date`, `due_date`, `tax`, `shipping`, `amount_due`) VALUES (39, 58, '2006-04-04 11:43:08', NULL, 0, 0, 0); -# 35 records - -# -# Dumping data for table 'order_details' -# - -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (27, 30, 34, 100, 14, 0, 2, NULL, 96, 83); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (28, 30, 80, 30, 3.5, 0, 2, NULL, NULL, 63); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (29, 31, 7, 10, 30, 0, 2, NULL, NULL, 64); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (30, 31, 51, 10, 53, 0, 2, NULL, NULL, 65); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (31, 31, 80, 10, 3.5, 0, 2, NULL, NULL, 66); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, 
`unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (32, 32, 1, 15, 18, 0, 2, NULL, NULL, 67); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (33, 32, 43, 20, 46, 0, 2, NULL, NULL, 68); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (34, 33, 19, 30, 9.2, 0, 2, NULL, 97, 81); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (35, 34, 19, 20, 9.2, 0, 2, NULL, NULL, 69); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (36, 35, 48, 10, 12.75, 0, 2, NULL, NULL, 70); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (37, 36, 41, 200, 9.65, 0, 2, NULL, 98, 79); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (38, 37, 8, 17, 40, 0, 2, NULL, NULL, 71); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (39, 38, 43, 300, 46, 0, 2, NULL, 99, 77); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (40, 39, 48, 100, 12.75, 0, 2, NULL, 100, 75); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, 
`discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (41, 40, 81, 200, 2.99, 0, 2, NULL, 101, 73); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (42, 41, 43, 300, 46, 0, 1, NULL, 102, 104); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (43, 42, 6, 10, 25, 0, 2, NULL, NULL, 84); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (44, 42, 4, 10, 22, 0, 2, NULL, NULL, 85); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (45, 42, 19, 10, 9.2, 0, 2, NULL, 103, 110); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (46, 43, 80, 20, 3.5, 0, 1, NULL, NULL, 86); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (47, 43, 81, 50, 2.99, 0, 1, NULL, NULL, 87); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (48, 44, 1, 25, 18, 0, 1, NULL, NULL, 88); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (49, 44, 43, 25, 46, 0, 1, NULL, NULL, 89); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, 
`date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (50, 44, 81, 25, 2.99, 0, 1, NULL, NULL, 90); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (51, 45, 41, 50, 9.65, 0, 2, NULL, 104, 116); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (52, 45, 40, 50, 18.4, 0, 2, NULL, NULL, 91); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (53, 46, 57, 100, 19.5, 0, 2, NULL, 105, 101); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (54, 46, 72, 50, 34.8, 0, 2, NULL, 106, 114); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (55, 47, 34, 300, 14, 0, 2, NULL, 107, 108); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (56, 48, 8, 25, 40, 0, 2, NULL, 108, 106); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (57, 48, 19, 25, 9.2, 0, 2, NULL, 109, 112); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (59, 50, 21, 20, 10, 0, 2, NULL, NULL, 92); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, 
`date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (60, 51, 5, 25, 21.35, 0, 2, NULL, NULL, 93); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (61, 51, 41, 30, 9.65, 0, 2, NULL, NULL, 94); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (62, 51, 40, 30, 18.4, 0, 2, NULL, NULL, 95); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (66, 56, 48, 10, 12.75, 0, 2, NULL, 111, 99); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (67, 55, 34, 87, 14, 0, 2, NULL, NULL, 117); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (68, 79, 7, 30, 30, 0, 2, NULL, NULL, 119); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (69, 79, 51, 30, 53, 0, 2, NULL, NULL, 118); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (70, 78, 17, 40, 39, 0, 2, NULL, NULL, 120); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (71, 77, 6, 90, 25, 0, 2, NULL, NULL, 121); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, 
`purchase_order_id`, `inventory_id`) VALUES (72, 76, 4, 30, 22, 0, 2, NULL, NULL, 122); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (73, 75, 48, 40, 12.75, 0, 2, NULL, NULL, 123); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (74, 74, 48, 40, 12.75, 0, 2, NULL, NULL, 124); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (75, 73, 41, 10, 9.65, 0, 2, NULL, NULL, 125); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (76, 72, 43, 5, 46, 0, 2, NULL, NULL, 126); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (77, 71, 40, 40, 18.4, 0, 2, NULL, NULL, 127); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (78, 70, 8, 20, 40, 0, 2, NULL, NULL, 128); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (79, 69, 80, 15, 3.5, 0, 2, NULL, NULL, 129); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (80, 67, 74, 20, 10, 0, 2, NULL, NULL, 130); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, 
`purchase_order_id`, `inventory_id`) VALUES (81, 60, 72, 40, 34.8, 0, 2, NULL, NULL, 131); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (82, 63, 3, 50, 10, 0, 2, NULL, NULL, 132); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (83, 63, 8, 3, 40, 0, 2, NULL, NULL, 133); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (84, 58, 20, 40, 81, 0, 2, NULL, NULL, 134); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (85, 58, 52, 40, 7, 0, 2, NULL, NULL, 135); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (86, 80, 56, 10, 38, 0, 1, NULL, NULL, 136); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (90, 81, 81, 0, 2.99, 0, 5, NULL, NULL, NULL); -INSERT INTO `order_details` (`id`, `order_id`, `product_id`, `quantity`, `unit_price`, `discount`, `status_id`, `date_allocated`, `purchase_order_id`, `inventory_id`) VALUES (91, 81, 56, 0, 38, 0, 0, NULL, NULL, NULL); -# 58 records - -# -# Dumping data for table 'order_details_status' -# - -INSERT INTO `order_details_status` (`id`, `status_name`) VALUES (0, 'None'); -INSERT INTO `order_details_status` (`id`, `status_name`) VALUES (1, 'Allocated'); -INSERT INTO `order_details_status` (`id`, `status_name`) VALUES (2, 'Invoiced'); -INSERT INTO `order_details_status` (`id`, 
`status_name`) VALUES (3, 'Shipped'); -INSERT INTO `order_details_status` (`id`, `status_name`) VALUES (4, 'On Order'); -INSERT INTO `order_details_status` (`id`, `status_name`) VALUES (5, 'No Stock'); -# 6 records - -# -# Dumping data for table 'orders' -# - -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (30, 9, 27, '2006-01-15 00:00:00', '2006-01-22 00:00:00', 2, 'Karen Toh', '789 27th Street', 'Las Vegas', 'NV', '99999', 'USA', 200, 0, 'Check', '2006-01-15 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (31, 3, 4, '2006-01-20 00:00:00', '2006-01-22 00:00:00', 1, 'Christina Lee', '123 4th Street', 'New York', 'NY', '99999', 'USA', 5, 0, 'Credit Card', '2006-01-20 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (32, 4, 12, '2006-01-22 00:00:00', '2006-01-22 00:00:00', 2, 'John Edwards', '123 12th Street', 'Las Vegas', 'NV', '99999', 'USA', 5, 0, 'Credit Card', '2006-01-22 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, 
`ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (33, 6, 8, '2006-01-30 00:00:00', '2006-01-31 00:00:00', 3, 'Elizabeth Andersen', '123 8th Street', 'Portland', 'OR', '99999', 'USA', 50, 0, 'Credit Card', '2006-01-30 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (34, 9, 4, '2006-02-06 00:00:00', '2006-02-07 00:00:00', 3, 'Christina Lee', '123 4th Street', 'New York', 'NY', '99999', 'USA', 4, 0, 'Check', '2006-02-06 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (35, 3, 29, '2006-02-10 00:00:00', '2006-02-12 00:00:00', 2, 'Soo Jung Lee', '789 29th Street', 'Denver', 'CO', '99999', 'USA', 7, 0, 'Check', '2006-02-10 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (36, 4, 3, '2006-02-23 00:00:00', '2006-02-25 00:00:00', 2, 'Thomas Axen', '123 3rd Street', 'Los Angelas', 'CA', '99999', 'USA', 7, 0, 'Cash', '2006-02-23 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, 
`ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (37, 8, 6, '2006-03-06 00:00:00', '2006-03-09 00:00:00', 2, 'Francisco Pérez-Olaeta', '123 6th Street', 'Milwaukee', 'WI', '99999', 'USA', 12, 0, 'Credit Card', '2006-03-06 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (38, 9, 28, '2006-03-10 00:00:00', '2006-03-11 00:00:00', 3, 'Amritansh Raghav', '789 28th Street', 'Memphis', 'TN', '99999', 'USA', 10, 0, 'Check', '2006-03-10 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (39, 3, 8, '2006-03-22 00:00:00', '2006-03-24 00:00:00', 3, 'Elizabeth Andersen', '123 8th Street', 'Portland', 'OR', '99999', 'USA', 5, 0, 'Check', '2006-03-22 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (40, 4, 10, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, 'Roland Wacker', '123 10th Street', 'Chicago', 'IL', '99999', 'USA', 9, 0, 'Credit Card', '2006-03-24 00:00:00', NULL, 0, NULL, 3); -INSERT INTO 
`orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (41, 1, 7, '2006-03-24 00:00:00', NULL, NULL, 'Ming-Yang Xie', '123 7th Street', 'Boise', 'ID', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (42, 1, 10, '2006-03-24 00:00:00', '2006-04-07 00:00:00', 1, 'Roland Wacker', '123 10th Street', 'Chicago', 'IL', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 2); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (43, 1, 11, '2006-03-24 00:00:00', NULL, 3, 'Peter Krschne', '123 11th Street', 'Miami', 'FL', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (44, 1, 1, '2006-03-24 00:00:00', NULL, NULL, 'Anna Bedecs', '123 1st Street', 'Seattle', 'WA', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, 
`shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (45, 1, 28, '2006-04-07 00:00:00', '2006-04-07 00:00:00', 3, 'Amritansh Raghav', '789 28th Street', 'Memphis', 'TN', '99999', 'USA', 40, 0, 'Credit Card', '2006-04-07 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (46, 7, 9, '2006-04-05 00:00:00', '2006-04-05 00:00:00', 1, 'Sven Mortensen', '123 9th Street', 'Salt Lake City', 'UT', '99999', 'USA', 100, 0, 'Check', '2006-04-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (47, 6, 6, '2006-04-08 00:00:00', '2006-04-08 00:00:00', 2, 'Francisco Pérez-Olaeta', '123 6th Street', 'Milwaukee', 'WI', '99999', 'USA', 300, 0, 'Credit Card', '2006-04-08 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (48, 4, 8, '2006-04-05 00:00:00', '2006-04-05 00:00:00', 2, 'Elizabeth Andersen', '123 8th Street', 'Portland', 'OR', '99999', 'USA', 50, 0, 'Check', '2006-04-05 00:00:00', NULL, 0, 
NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (50, 9, 25, '2006-04-05 00:00:00', '2006-04-05 00:00:00', 1, 'John Rodman', '789 25th Street', 'Chicago', 'IL', '99999', 'USA', 5, 0, 'Cash', '2006-04-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (51, 9, 26, '2006-04-05 00:00:00', '2006-04-05 00:00:00', 3, 'Run Liu', '789 26th Street', 'Miami', 'FL', '99999', 'USA', 60, 0, 'Credit Card', '2006-04-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (55, 1, 29, '2006-04-05 00:00:00', '2006-04-05 00:00:00', 2, 'Soo Jung Lee', '789 29th Street', 'Denver', 'CO', '99999', 'USA', 200, 0, 'Check', '2006-04-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (56, 2, 6, '2006-04-03 00:00:00', '2006-04-03 00:00:00', 3, 'Francisco Pérez-Olaeta', '123 6th Street', 'Milwaukee', 'WI', 
'99999', 'USA', 0, 0, 'Check', '2006-04-03 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (57, 9, 27, '2006-04-22 00:00:00', '2006-04-22 00:00:00', 2, 'Karen Toh', '789 27th Street', 'Las Vegas', 'NV', '99999', 'USA', 200, 0, 'Check', '2006-04-22 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (58, 3, 4, '2006-04-22 00:00:00', '2006-04-22 00:00:00', 1, 'Christina Lee', '123 4th Street', 'New York', 'NY', '99999', 'USA', 5, 0, 'Credit Card', '2006-04-22 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (59, 4, 12, '2006-04-22 00:00:00', '2006-04-22 00:00:00', 2, 'John Edwards', '123 12th Street', 'Las Vegas', 'NV', '99999', 'USA', 5, 0, 'Credit Card', '2006-04-22 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (60, 6, 8, '2006-04-30 00:00:00', '2006-04-30 
00:00:00', 3, 'Elizabeth Andersen', '123 8th Street', 'Portland', 'OR', '99999', 'USA', 50, 0, 'Credit Card', '2006-04-30 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (61, 9, 4, '2006-04-07 00:00:00', '2006-04-07 00:00:00', 3, 'Christina Lee', '123 4th Street', 'New York', 'NY', '99999', 'USA', 4, 0, 'Check', '2006-04-07 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (62, 3, 29, '2006-04-12 00:00:00', '2006-04-12 00:00:00', 2, 'Soo Jung Lee', '789 29th Street', 'Denver', 'CO', '99999', 'USA', 7, 0, 'Check', '2006-04-12 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (63, 4, 3, '2006-04-25 00:00:00', '2006-04-25 00:00:00', 2, 'Thomas Axen', '123 3rd Street', 'Los Angelas', 'CA', '99999', 'USA', 7, 0, 'Cash', '2006-04-25 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, 
`status_id`) VALUES (64, 8, 6, '2006-05-09 00:00:00', '2006-05-09 00:00:00', 2, 'Francisco Pérez-Olaeta', '123 6th Street', 'Milwaukee', 'WI', '99999', 'USA', 12, 0, 'Credit Card', '2006-05-09 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (65, 9, 28, '2006-05-11 00:00:00', '2006-05-11 00:00:00', 3, 'Amritansh Raghav', '789 28th Street', 'Memphis', 'TN', '99999', 'USA', 10, 0, 'Check', '2006-05-11 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (66, 3, 8, '2006-05-24 00:00:00', '2006-05-24 00:00:00', 3, 'Elizabeth Andersen', '123 8th Street', 'Portland', 'OR', '99999', 'USA', 5, 0, 'Check', '2006-05-24 00:00:00', NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (67, 4, 10, '2006-05-24 00:00:00', '2006-05-24 00:00:00', 2, 'Roland Wacker', '123 10th Street', 'Chicago', 'IL', '99999', 'USA', 9, 0, 'Credit Card', '2006-05-24 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, 
`shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (68, 1, 7, '2006-05-24 00:00:00', NULL, NULL, 'Ming-Yang Xie', '123 7th Street', 'Boise', 'ID', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (69, 1, 10, '2006-05-24 00:00:00', NULL, 1, 'Roland Wacker', '123 10th Street', 'Chicago', 'IL', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (70, 1, 11, '2006-05-24 00:00:00', NULL, 3, 'Peter Krschne', '123 11th Street', 'Miami', 'FL', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (71, 1, 1, '2006-05-24 00:00:00', NULL, 3, 'Anna Bedecs', '123 1st Street', 'Seattle', 'WA', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, 
`status_id`) VALUES (72, 1, 28, '2006-06-07 00:00:00', '2006-06-07 00:00:00', 3, 'Amritansh Raghav', '789 28th Street', 'Memphis', 'TN', '99999', 'USA', 40, 0, 'Credit Card', '2006-06-07 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (73, 7, 9, '2006-06-05 00:00:00', '2006-06-05 00:00:00', 1, 'Sven Mortensen', '123 9th Street', 'Salt Lake City', 'UT', '99999', 'USA', 100, 0, 'Check', '2006-06-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (74, 6, 6, '2006-06-08 00:00:00', '2006-06-08 00:00:00', 2, 'Francisco Pérez-Olaeta', '123 6th Street', 'Milwaukee', 'WI', '99999', 'USA', 300, 0, 'Credit Card', '2006-06-08 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (75, 4, 8, '2006-06-05 00:00:00', '2006-06-05 00:00:00', 2, 'Elizabeth Andersen', '123 8th Street', 'Portland', 'OR', '99999', 'USA', 50, 0, 'Check', '2006-06-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, 
`ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (76, 9, 25, '2006-06-05 00:00:00', '2006-06-05 00:00:00', 1, 'John Rodman', '789 25th Street', 'Chicago', 'IL', '99999', 'USA', 5, 0, 'Cash', '2006-06-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (77, 9, 26, '2006-06-05 00:00:00', '2006-06-05 00:00:00', 3, 'Run Liu', '789 26th Street', 'Miami', 'FL', '99999', 'USA', 60, 0, 'Credit Card', '2006-06-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (78, 1, 29, '2006-06-05 00:00:00', '2006-06-05 00:00:00', 2, 'Soo Jung Lee', '789 29th Street', 'Denver', 'CO', '99999', 'USA', 200, 0, 'Check', '2006-06-05 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (79, 2, 6, '2006-06-23 00:00:00', '2006-06-23 00:00:00', 3, 'Francisco Pérez-Olaeta', '123 6th Street', 'Milwaukee', 'WI', '99999', 'USA', 0, 0, 'Check', '2006-06-23 00:00:00', NULL, 0, NULL, 3); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, 
`ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (80, 2, 4, '2006-04-25 17:03:55', NULL, NULL, 'Christina Lee', '123 4th Street', 'New York', 'NY', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -INSERT INTO `orders` (`id`, `employee_id`, `customer_id`, `order_date`, `shipped_date`, `shipper_id`, `ship_name`, `ship_address`, `ship_city`, `ship_state_province`, `ship_zip_postal_code`, `ship_country_region`, `shipping_fee`, `taxes`, `payment_type`, `paid_date`, `notes`, `tax_rate`, `tax_status_id`, `status_id`) VALUES (81, 2, 3, '2006-04-25 17:26:53', NULL, NULL, 'Thomas Axen', '123 3rd Street', 'Los Angelas', 'CA', '99999', 'USA', 0, 0, NULL, NULL, NULL, 0, NULL, 0); -# 48 records - -# -# Dumping data for table 'orders_status' -# - -INSERT INTO `orders_status` (`id`, `status_name`) VALUES (0, 'New'); -INSERT INTO `orders_status` (`id`, `status_name`) VALUES (1, 'Invoiced'); -INSERT INTO `orders_status` (`id`, `status_name`) VALUES (2, 'Shipped'); -INSERT INTO `orders_status` (`id`, `status_name`) VALUES (3, 'Closed'); -# 4 records - -# -# Dumping data for table 'orders_tax_status' -# - -INSERT INTO `orders_tax_status` (`id`, `tax_status_name`) VALUES (0, 'Tax Exempt'); -INSERT INTO `orders_tax_status` (`id`, `tax_status_name`) VALUES (1, 'Taxable'); -# 2 records - -# -# Dumping data for table 'privileges' -# - -INSERT INTO `privileges` (`id`, `privilege_name`) VALUES (2, 'Purchase Approvals'); -# 1 records - -# -# Dumping data for table 'products' -# - -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('4', 1, 'NWTB-1', 'Northwind Traders Chai', NULL, 13.5, 18, 10, 40, '10 boxes x 20 bags', 0, 10, 
'Beverages', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('10', 3, 'NWTCO-3', 'Northwind Traders Syrup', NULL, 7.5, 10, 25, 100, '12 - 550 ml bottles', 0, 25, 'Condiments', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('10', 4, 'NWTCO-4', 'Northwind Traders Cajun Seasoning', NULL, 16.5, 22, 10, 40, '48 - 6 oz jars', 0, 10, 'Condiments', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('10', 5, 'NWTO-5', 'Northwind Traders Olive Oil', NULL, 16.0125, 21.35, 10, 40, '36 boxes', 0, 10, 'Oil', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2;6', 6, 'NWTJP-6', 'Northwind Traders Boysenberry Spread', NULL, 18.75, 25, 25, 100, '12 - 8 oz jars', 0, 25, 'Jams, Preserves', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2', 7, 'NWTDFN-7', 'Northwind Traders Dried Pears', NULL, 22.5, 30, 10, 40, '12 - 1 lb pkgs.', 0, 10, 'Dried Fruit & Nuts', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, 
`product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('8', 8, 'NWTS-8', 'Northwind Traders Curry Sauce', NULL, 30, 40, 10, 40, '12 - 12 oz jars', 0, 10, 'Sauces', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2;6', 14, 'NWTDFN-14', 'Northwind Traders Walnuts', NULL, 17.4375, 23.25, 10, 40, '40 - 100 g pkgs.', 0, 10, 'Dried Fruit & Nuts', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 17, 'NWTCFV-17', 'Northwind Traders Fruit Cocktail', NULL, 29.25, 39, 10, 40, '15.25 OZ', 0, 10, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 19, 'NWTBGM-19', 'Northwind Traders Chocolate Biscuits Mix', NULL, 6.9, 9.2, 5, 20, '10 boxes x 12 pieces', 0, 5, 'Baked Goods & Mixes', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2;6', 20, 'NWTJP-6', 'Northwind Traders Marmalade', NULL, 60.75, 81, 10, 40, '30 gift boxes', 0, 10, 'Jams, Preserves', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, 
`list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 21, 'NWTBGM-21', 'Northwind Traders Scones', NULL, 7.5, 10, 5, 20, '24 pkgs. x 4 pieces', 0, 5, 'Baked Goods & Mixes', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('4', 34, 'NWTB-34', 'Northwind Traders Beer', NULL, 10.5, 14, 15, 60, '24 - 12 oz bottles', 0, 15, 'Beverages', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('7', 40, 'NWTCM-40', 'Northwind Traders Crab Meat', NULL, 13.8, 18.4, 30, 120, '24 - 4 oz tins', 0, 30, 'Canned Meat', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 41, 'NWTSO-41', 'Northwind Traders Clam Chowder', NULL, 7.2375, 9.65, 10, 40, '12 - 12 oz cans', 0, 10, 'Soups', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('3;4', 43, 'NWTB-43', 'Northwind Traders Coffee', NULL, 34.5, 46, 25, 100, '16 - 500 g tins', 0, 25, 'Beverages', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, 
`minimum_reorder_quantity`, `category`, `attachments`) VALUES ('10', 48, 'NWTCA-48', 'Northwind Traders Chocolate', NULL, 9.5625, 12.75, 25, 100, '10 pkgs', 0, 25, 'Candy', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2', 51, 'NWTDFN-51', 'Northwind Traders Dried Apples', NULL, 39.75, 53, 10, 40, '50 - 300 g pkgs.', 0, 10, 'Dried Fruit & Nuts', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 52, 'NWTG-52', 'Northwind Traders Long Grain Rice', NULL, 5.25, 7, 25, 100, '16 - 2 kg boxes', 0, 25, 'Grains', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 56, 'NWTP-56', 'Northwind Traders Gnocchi', NULL, 28.5, 38, 30, 120, '24 - 250 g pkgs.', 0, 30, 'Pasta', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 57, 'NWTP-57', 'Northwind Traders Ravioli', NULL, 14.625, 19.5, 20, 80, '24 - 250 g pkgs.', 0, 20, 'Pasta', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('8', 65, 'NWTS-65', 'Northwind Traders 
Hot Pepper Sauce', NULL, 15.7875, 21.05, 10, 40, '32 - 8 oz bottles', 0, 10, 'Sauces', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('8', 66, 'NWTS-66', 'Northwind Traders Tomato Sauce', NULL, 12.75, 17, 20, 80, '24 - 8 oz jars', 0, 20, 'Sauces', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('5', 72, 'NWTD-72', 'Northwind Traders Mozzarella', NULL, 26.1, 34.8, 10, 40, '24 - 200 g pkgs.', 0, 10, 'Dairy products', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2;6', 74, 'NWTDFN-74', 'Northwind Traders Almonds', NULL, 7.5, 10, 5, 20, '5 kg pkg.', 0, 5, 'Dried Fruit & Nuts', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('10', 77, 'NWTCO-77', 'Northwind Traders Mustard', NULL, 9.75, 13, 15, 60, '12 boxes', 0, 15, 'Condiments', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('2', 80, 'NWTDFN-80', 'Northwind Traders Dried Plums', NULL, 3, 3.5, 50, 75, '1 lb bag', 0, 25, 'Dried Fruit & Nuts', ''); -INSERT INTO 
`products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('3', 81, 'NWTB-81', 'Northwind Traders Green Tea', NULL, 2, 2.99, 100, 125, '20 bags per box', 0, 25, 'Beverages', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 82, 'NWTC-82', 'Northwind Traders Granola', NULL, 2, 4, 20, 100, NULL, 0, NULL, 'Cereal', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('9', 83, 'NWTCS-83', 'Northwind Traders Potato Chips', NULL, .5, 1.8, 30, 200, NULL, 0, NULL, 'Chips, Snacks', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 85, 'NWTBGM-85', 'Northwind Traders Brownie Mix', NULL, 9, 12.49, 10, 20, '3 boxes', 0, 5, 'Baked Goods & Mixes', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 86, 'NWTBGM-86', 'Northwind Traders Cake Mix', NULL, 10.5, 15.99, 10, 20, '4 boxes', 0, 5, 'Baked Goods & Mixes', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, 
`reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('7', 87, 'NWTB-87', 'Northwind Traders Tea', NULL, 2, 4, 20, 50, '100 count per box', 0, NULL, 'Beverages', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 88, 'NWTCFV-88', 'Northwind Traders Pears', NULL, 1, 1.3, 10, 40, '15.25 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 89, 'NWTCFV-89', 'Northwind Traders Peaches', NULL, 1, 1.5, 10, 40, '15.25 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 90, 'NWTCFV-90', 'Northwind Traders Pineapple', NULL, 1, 1.8, 10, 40, '15.25 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 91, 'NWTCFV-91', 'Northwind Traders Cherry Pie Filling', NULL, 1, 2, 10, 40, '15.25 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, 
`minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 92, 'NWTCFV-92', 'Northwind Traders Green Beans', NULL, 1, 1.2, 10, 40, '14.5 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 93, 'NWTCFV-93', 'Northwind Traders Corn', NULL, 1, 1.2, 10, 40, '14.5 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 94, 'NWTCFV-94', 'Northwind Traders Peas', NULL, 1, 1.5, 10, 40, '14.5 OZ', 0, NULL, 'Canned Fruit & Vegetables', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('7', 95, 'NWTCM-95', 'Northwind Traders Tuna Fish', NULL, .5, 2, 30, 50, '5 oz', 0, NULL, 'Canned Meat', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('7', 96, 'NWTCM-96', 'Northwind Traders Smoked Salmon', NULL, 2, 4, 30, 50, '5 oz', 0, NULL, 'Canned Meat', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('1', 97, 'NWTC-82', 'Northwind Traders Hot 
Cereal', NULL, 3, 5, 50, 200, NULL, 0, NULL, 'Cereal', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 98, 'NWTSO-98', 'Northwind Traders Vegetable Soup', NULL, 1, 1.89, 100, 200, NULL, 0, NULL, 'Soups', ''); -INSERT INTO `products` (`supplier_ids`, `id`, `product_code`, `product_name`, `description`, `standard_cost`, `list_price`, `reorder_level`, `target_level`, `quantity_per_unit`, `discontinued`, `minimum_reorder_quantity`, `category`, `attachments`) VALUES ('6', 99, 'NWTSO-99', 'Northwind Traders Chicken Soup', NULL, 1, 1.95, 100, 200, NULL, 0, NULL, 'Soups', ''); -# 45 records - -# -# Dumping data for table 'purchase_order_details' -# - -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (238, 90, 1, 40, 14, '2006-01-22 00:00:00', 1, 59); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (239, 91, 3, 100, 8, '2006-01-22 00:00:00', 1, 54); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (240, 91, 4, 40, 16, '2006-01-22 00:00:00', 1, 55); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (241, 91, 5, 40, 16, '2006-01-22 00:00:00', 1, 56); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (242, 92, 6, 100, 19, '2006-01-22 00:00:00', 1, 40); -INSERT INTO 
`purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (243, 92, 7, 40, 22, '2006-01-22 00:00:00', 1, 41); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (244, 92, 8, 40, 30, '2006-01-22 00:00:00', 1, 42); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (245, 92, 14, 40, 17, '2006-01-22 00:00:00', 1, 43); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (246, 92, 17, 40, 29, '2006-01-22 00:00:00', 1, 44); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (247, 92, 19, 20, 7, '2006-01-22 00:00:00', 1, 45); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (248, 92, 20, 40, 61, '2006-01-22 00:00:00', 1, 46); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (249, 92, 21, 20, 8, '2006-01-22 00:00:00', 1, 47); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (250, 90, 34, 60, 10, '2006-01-22 00:00:00', 1, 60); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (251, 92, 40, 120, 14, '2006-01-22 00:00:00', 1, 48); -INSERT INTO 
`purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (252, 92, 41, 40, 7, '2006-01-22 00:00:00', 1, 49); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (253, 90, 43, 100, 34, '2006-01-22 00:00:00', 1, 61); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (254, 92, 48, 100, 10, '2006-01-22 00:00:00', 1, 50); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (255, 92, 51, 40, 40, '2006-01-22 00:00:00', 1, 51); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (256, 93, 52, 100, 5, '2006-01-22 00:00:00', 1, 37); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (257, 93, 56, 120, 28, '2006-01-22 00:00:00', 1, 38); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (258, 93, 57, 80, 15, '2006-01-22 00:00:00', 1, 39); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (259, 91, 65, 40, 16, '2006-01-22 00:00:00', 1, 57); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (260, 91, 66, 80, 13, '2006-01-22 00:00:00', 1, 58); -INSERT INTO 
`purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (261, 94, 72, 40, 26, '2006-01-22 00:00:00', 1, 36); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (262, 92, 74, 20, 8, '2006-01-22 00:00:00', 1, 52); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (263, 92, 77, 60, 10, '2006-01-22 00:00:00', 1, 53); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (264, 95, 80, 75, 3, '2006-01-22 00:00:00', 1, 35); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (265, 90, 81, 125, 2, '2006-01-22 00:00:00', 1, 62); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (266, 96, 34, 100, 10, '2006-01-22 00:00:00', 1, 82); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (267, 97, 19, 30, 7, '2006-01-22 00:00:00', 1, 80); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (268, 98, 41, 200, 7, '2006-01-22 00:00:00', 1, 78); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (269, 99, 43, 300, 34, '2006-01-22 00:00:00', 1, 76); -INSERT INTO 
`purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (270, 100, 48, 100, 10, '2006-01-22 00:00:00', 1, 74); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (271, 101, 81, 200, 2, '2006-01-22 00:00:00', 1, 72); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (272, 102, 43, 300, 34, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (273, 103, 19, 10, 7, '2006-04-17 00:00:00', 1, 111); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (274, 104, 41, 50, 7, '2006-04-06 00:00:00', 1, 115); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (275, 105, 57, 100, 15, '2006-04-05 00:00:00', 1, 100); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (276, 106, 72, 50, 26, '2006-04-05 00:00:00', 1, 113); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (277, 107, 34, 300, 10, '2006-04-05 00:00:00', 1, 107); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (278, 108, 8, 25, 30, '2006-04-05 00:00:00', 1, 105); -INSERT INTO 
`purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (279, 109, 19, 25, 7, '2006-04-05 00:00:00', 1, 109); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (280, 110, 43, 250, 34, '2006-04-10 00:00:00', 1, 103); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (281, 90, 1, 40, 14, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (282, 92, 19, 20, 7, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (283, 111, 34, 50, 10, '2006-04-04 00:00:00', 1, 102); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (285, 91, 3, 50, 8, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (286, 91, 4, 40, 16, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (288, 140, 85, 10, 9, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (289, 141, 6, 10, 18.75, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, 
`date_received`, `posted_to_inventory`, `inventory_id`) VALUES (290, 142, 1, 1, 13.5, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (292, 146, 20, 40, 60, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (293, 146, 51, 40, 39, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (294, 147, 40, 120, 13, NULL, 0, NULL); -INSERT INTO `purchase_order_details` (`id`, `purchase_order_id`, `product_id`, `quantity`, `unit_cost`, `date_received`, `posted_to_inventory`, `inventory_id`) VALUES (295, 148, 72, 40, 26, NULL, 0, NULL); -# 55 records - -# -# Dumping data for table 'purchase_order_status' -# - -INSERT INTO `purchase_order_status` (`id`, `status`) VALUES (0, 'New'); -INSERT INTO `purchase_order_status` (`id`, `status`) VALUES (1, 'Submitted'); -INSERT INTO `purchase_order_status` (`id`, `status`) VALUES (2, 'Approved'); -INSERT INTO `purchase_order_status` (`id`, `status`) VALUES (3, 'Closed'); -# 4 records - -# -# Dumping data for table 'purchase_orders' -# - -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (90, 1, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, 
`approved_by`, `approved_date`, `submitted_by`) VALUES (91, 3, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (92, 2, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (93, 5, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (94, 6, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (95, 4, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, 
`approved_date`, `submitted_by`) VALUES (96, 1, 5, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #30', 2, '2006-01-22 00:00:00', 5); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (97, 2, 7, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #33', 2, '2006-01-22 00:00:00', 7); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (98, 2, 4, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #36', 2, '2006-01-22 00:00:00', 4); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (99, 1, 3, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #38', 2, '2006-01-22 00:00:00', 3); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (100, 2, 9, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #39', 2, '2006-01-22 00:00:00', 9); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, 
`submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (101, 1, 2, '2006-01-14 00:00:00', '2006-01-22 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #40', 2, '2006-01-22 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (102, 1, 1, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #41', 2, '2006-04-04 00:00:00', 1); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (103, 2, 1, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #42', 2, '2006-04-04 00:00:00', 1); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (104, 2, 1, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #45', 2, '2006-04-04 00:00:00', 1); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (105, 5, 7, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, 
NULL, 0, 0, NULL, 0, 'Check', 'Purchase generated based on Order #46', 2, '2006-04-04 00:00:00', 7); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (106, 6, 7, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #46', 2, '2006-04-04 00:00:00', 7); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (107, 1, 6, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #47', 2, '2006-04-04 00:00:00', 6); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (108, 2, 4, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #48', 2, '2006-04-04 00:00:00', 4); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (109, 2, 4, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #48', 2, '2006-04-04 00:00:00', 4); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, 
`payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (110, 1, 3, '2006-03-24 00:00:00', '2006-03-24 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #49', 2, '2006-04-04 00:00:00', 3); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (111, 1, 2, '2006-03-31 00:00:00', '2006-03-31 00:00:00', 2, NULL, 0, 0, NULL, 0, NULL, 'Purchase generated based on Order #56', 2, '2006-04-04 00:00:00', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (140, 6, NULL, '2006-04-25 00:00:00', '2006-04-25 16:40:51', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-04-25 16:41:33', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (141, 8, NULL, '2006-04-25 00:00:00', '2006-04-25 17:10:35', 2, NULL, 0, 0, NULL, 0, NULL, NULL, 2, '2006-04-25 17:10:55', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (142, 8, NULL, '2006-04-25 00:00:00', '2006-04-25 17:18:29', 2, NULL, 0, 0, NULL, 0, 'Check', NULL, 2, '2006-04-25 17:18:51', 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, 
`creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (146, 2, 2, '2006-04-26 18:26:37', '2006-04-26 18:26:37', 1, NULL, 0, 0, NULL, 0, NULL, NULL, NULL, NULL, 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (147, 7, 2, '2006-04-26 18:33:28', '2006-04-26 18:33:28', 1, NULL, 0, 0, NULL, 0, NULL, NULL, NULL, NULL, 2); -INSERT INTO `purchase_orders` (`id`, `supplier_id`, `created_by`, `submitted_date`, `creation_date`, `status_id`, `expected_date`, `shipping_fee`, `taxes`, `payment_date`, `payment_amount`, `payment_method`, `notes`, `approved_by`, `approved_date`, `submitted_by`) VALUES (148, 5, 2, '2006-04-26 18:33:52', '2006-04-26 18:33:52', 1, NULL, 0, 0, NULL, 0, NULL, NULL, NULL, NULL, 2); -# 28 records - -# -# Dumping data for table 'sales_reports' -# - -INSERT INTO `sales_reports` (`group_by`, `display`, `title`, `filter_row_source`, `default`) VALUES ('Category', 'Category', 'Sales By Category', 'SELECT DISTINCT [Category] FROM [products] ORDER BY [Category];', 0); -INSERT INTO `sales_reports` (`group_by`, `display`, `title`, `filter_row_source`, `default`) VALUES ('country_region', 'Country/Region', 'Sales By Country', 'SELECT DISTINCT [country_region] FROM [customers Extended] ORDER BY [country_region];', 0); -INSERT INTO `sales_reports` (`group_by`, `display`, `title`, `filter_row_source`, `default`) VALUES ('Customer ID', 'Customer', 'Sales By Customer', 'SELECT DISTINCT [Company] FROM [customers Extended] ORDER BY [Company];', 0); -INSERT INTO `sales_reports` (`group_by`, `display`, `title`, `filter_row_source`, `default`) VALUES ('employee_id', 'Employee', 'Sales By Employee', 'SELECT DISTINCT 
[Employee Name] FROM [employees Extended] ORDER BY [Employee Name];', 0); -INSERT INTO `sales_reports` (`group_by`, `display`, `title`, `filter_row_source`, `default`) VALUES ('Product ID', 'Product', 'Sales by Product', 'SELECT DISTINCT [Product Name] FROM [products] ORDER BY [Product Name];', 1); -# 5 records - -# -# Dumping data for table 'shippers' -# - -INSERT INTO `shippers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (1, 'Shipping Company A', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '123 Any Street', 'Memphis', 'TN', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `shippers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (2, 'Shipping Company B', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '123 Any Street', 'Memphis', 'TN', '99999', 'USA', NULL, NULL, ''); -INSERT INTO `shippers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (3, 'Shipping Company C', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '123 Any Street', 'Memphis', 'TN', '99999', 'USA', NULL, NULL, ''); -# 3 records - -# -# Dumping data for table 'strings' -# - -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (2, 'Northwind Traders'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (3, 'Cannot remove posted inventory!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (4, 'Back ordered product filled for Order #|'); -INSERT INTO `strings` 
(`string_id`, `string_data`) VALUES (5, 'Discounted price below cost!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (6, 'Insufficient inventory.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (7, 'Insufficient inventory. Do you want to create a purchase order?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (8, 'Purchase orders were successfully created for | products'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (9, 'There are no products below their respective reorder levels'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (10, 'Must specify customer name!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (11, 'Restocking will generate purchase orders for all products below desired inventory levels. Do you want to continue?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (12, 'Cannot create purchase order. No suppliers listed for specified product'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (13, 'Discounted price is below cost!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (14, 'Do you want to continue?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (15, 'Order is already invoiced. Do you want to print the invoice?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (16, 'Order does not contain any line items'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (17, 'Cannot create invoice! Inventory has not been allocated for each specified product.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (18, 'Sorry, there are no sales in the specified time period'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (19, 'Product successfully restocked.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (21, 'Product does not need restocking! 
Product is already at desired inventory level.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (22, 'Product restocking failed!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (23, 'Invalid login specified!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (24, 'Must first select reported!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (25, 'Changing supplier will remove purchase line items, continue?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (26, 'Purchase orders were successfully submitted for | products. Do you want to view the restocking report?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (27, 'There was an error attempting to restock inventory levels.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (28, '| product(s) were successfully restocked. Do you want to view the restocking report?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (29, 'You cannot remove purchase line items already posted to inventory!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (30, 'There was an error removing one or more purchase line items.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (31, 'You cannot modify quantity for purchased product already received or posted to inventory.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (32, 'You cannot modify price for purchased product already received or posted to inventory.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (33, 'Product has been successfully posted to inventory.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (34, 'Sorry, product cannot be successfully posted to inventory.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (35, 'There are orders with this product on back order. 
Would you like to fill them now?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (36, 'Cannot post product to inventory without specifying received date!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (37, 'Do you want to post received product to inventory?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (38, 'Initialize purchase, orders, and inventory data?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (39, 'Must first specify employee name!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (40, 'Specified user must be logged in to approve purchase!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (41, 'Purchase order must contain completed line items before it can be approved'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (42, 'Sorry, you do not have permission to approve purchases.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (43, 'Purchase successfully approved'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (44, 'Purchase cannot be approved'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (45, 'Purchase successfully submitted for approval'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (46, 'Purchase cannot be submitted for approval'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (47, 'Sorry, purchase order does not contain line items'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (48, 'Do you want to cancel this order?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (49, 'Canceling an order will permanently delete the order. 
Are you sure you want to cancel?'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (100, 'Your order was successfully canceled.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (101, 'Cannot cancel an order that has items received and posted to inventory.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (102, 'There was an error trying to cancel this order.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (103, 'The invoice for this order has not yet been created.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (104, 'Shipping information is not complete. Please specify all shipping information and try again.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (105, 'Cannot mark as shipped. Order must first be invoiced!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (106, 'Cannot cancel an order that has already shipped!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (107, 'Must first specify salesperson!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (108, 'Order is now marked closed.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (109, 'Order must first be marked shipped before closing.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (110, 'Must first specify payment information!'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (111, 'There was an error attempting to restock inventory levels. 
| product(s) were successfully restocked.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (112, 'You must supply a Unit Cost.'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (113, 'Fill back ordered product, Order #|'); -INSERT INTO `strings` (`string_id`, `string_data`) VALUES (114, 'Purchase generated based on Order #|'); -# 62 records - -# -# Dumping data for table 'suppliers' -# - -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (1, 'Supplier A', 'Andersen', 'Elizabeth A.', NULL, 'Sales Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (2, 'Supplier B', 'Weiler', 'Cornelia', NULL, 'Sales Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (3, 'Supplier C', 'Kelley', 'Madeleine', NULL, 'Sales Representative', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (4, 'Supplier D', 'Sato', 'Naoki', NULL, 'Marketing 
Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (5, 'Supplier E', 'Hernandez-Echevarria', 'Amaya', NULL, 'Sales Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (6, 'Supplier F', 'Hayakawa', 'Satomi', NULL, 'Marketing Assistant', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (7, 'Supplier G', 'Glasson', 'Stuart', NULL, 'Marketing Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (8, 'Supplier H', 'Dunton', 'Bryn Paul', NULL, 'Sales Representative', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, 
`web_page`, `notes`, `attachments`) VALUES (9, 'Supplier I', 'Sandberg', 'Mikael', NULL, 'Sales Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -INSERT INTO `suppliers` (`id`, `company`, `last_name`, `first_name`, `email_address`, `job_title`, `business_phone`, `home_phone`, `mobile_phone`, `fax_number`, `address`, `city`, `state_province`, `zip_postal_code`, `country_region`, `web_page`, `notes`, `attachments`) VALUES (10, 'Supplier J', 'Sousa', 'Luis', NULL, 'Sales Manager', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ''); -# 10 records - -SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; -SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; \ No newline at end of file diff --git a/database-files/03_add_to_northwind.sql b/database-files/03_add_to_northwind.sql deleted file mode 100644 index 4587e2b616..0000000000 --- a/database-files/03_add_to_northwind.sql +++ /dev/null @@ -1,22 +0,0 @@ -USE northwind; - --- ----------------------------------------------------- --- Model Params Table and data added by Dr. 
Fontenot --- ----------------------------------------------------- -CREATE TABLE IF NOT EXISTS model1_param_vals( - sequence_number INTEGER AUTO_INCREMENT PRIMARY KEY, - beta_0 FLOAT, - beta_1 FLOAT, - beta_2 FLOAT -); - -INSERT INTO model1_param_vals(beta_0, beta_1, beta_2) values (0.1214, 0.2354, 0.3245); - -CREATE TABLE IF NOT EXISTS model1_params( - sequence_number INTEGER AUTO_INCREMENT PRIMARY KEY, - beta_vals varchar(100) -); - -INSERT INTO model1_params (beta_vals) VALUES ("[0.124, 0.2354, 0.3245]"); - -commit; diff --git a/database-files/ballwatchers-schema.sql b/database-files/ballwatchers-schema.sql new file mode 100644 index 0000000000..1a7c454ed1 --- /dev/null +++ b/database-files/ballwatchers-schema.sql @@ -0,0 +1,321 @@ +CREATE SCHEMA IF NOT EXISTS BallWatch; +USE BallWatch; + +-- Drop tables in correct order (respecting foreign key constraints) +DROP TABLE IF EXISTS ValidationReports; +DROP TABLE IF EXISTS CleanupHistory; +DROP TABLE IF EXISTS CleanupSchedule; +DROP TABLE IF EXISTS DataErrors; +DROP TABLE IF EXISTS ErrorLogs; +DROP TABLE IF EXISTS DataLoads; +DROP TABLE IF EXISTS SystemHealth; +DROP TABLE IF EXISTS GamePlans; +DROP TABLE IF EXISTS KeyMatchups; +DROP TABLE IF EXISTS GamePlayers; +DROP TABLE IF EXISTS PlayerGameStats; +DROP TABLE IF EXISTS PlayerMatchup; +DROP TABLE IF EXISTS Game; +DROP TABLE IF EXISTS TeamsPlayers; +DROP TABLE IF EXISTS DraftEvaluations; +DROP TABLE IF EXISTS PlayerLineups; +DROP TABLE IF EXISTS Teams; +DROP TABLE IF EXISTS Players; +DROP TABLE IF EXISTS LineupConfiguration; +DROP TABLE IF EXISTS Agent; +DROP TABLE IF EXISTS Users; + +CREATE TABLE IF NOT EXISTS Users ( + user_id INT PRIMARY KEY AUTO_INCREMENT, + email VARCHAR(100) UNIQUE NOT NULL, + username VARCHAR(50) UNIQUE NOT NULL, + role ENUM('admin', 'coach', 'gm', 'analyst', 'fan') NOT NULL, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + is_active BOOLEAN DEFAULT TRUE +); + +CREATE TABLE IF NOT EXISTS Agent ( + agent_id INT PRIMARY KEY AUTO_INCREMENT, + 
first_name VARCHAR(50) NOT NULL, + last_name VARCHAR(50) NOT NULL, + agency_name VARCHAR(100), + phone VARCHAR(20), + email VARCHAR(100) UNIQUE +); + +CREATE TABLE IF NOT EXISTS Players ( + player_id INT PRIMARY KEY AUTO_INCREMENT, + first_name VARCHAR(50) NOT NULL, + last_name VARCHAR(50) NOT NULL, + age INT CHECK (age > 0), + college VARCHAR(100), + position ENUM('Guard', 'Forward', 'Center', 'PG', 'SG', 'SF', 'PF', 'C'), + weight INT CHECK (weight > 0), + player_status ENUM('Active', 'Injured', 'Retired', 'G-League') DEFAULT 'Active', + agent_id INT, + height VARCHAR(10), + picture TEXT, + DOB DATE, + years_exp INT DEFAULT 0, + dominant_hand ENUM('Left', 'Right') DEFAULT 'Right', + expected_salary DECIMAL(12,2), + player_type VARCHAR(50), + current_salary DECIMAL(12,2), + draft_year INT, + CONSTRAINT FK_Players_Agent FOREIGN KEY (agent_id) REFERENCES Agent(agent_id) + ON UPDATE CASCADE ON DELETE SET NULL +); + +CREATE TABLE IF NOT EXISTS Teams ( + team_id INT PRIMARY KEY AUTO_INCREMENT, + name VARCHAR(100) NOT NULL, + city VARCHAR(50), + state VARCHAR(50), + arena VARCHAR(100), + conference ENUM('Eastern', 'Western') NOT NULL, + division VARCHAR(50) NOT NULL, + coach VARCHAR(100), + gm VARCHAR(100), + owner VARCHAR(100), + championships INT DEFAULT 0, + founded_year INT, + offensive_system VARCHAR(100), + defensive_system VARCHAR(100) +); + +CREATE TABLE IF NOT EXISTS TeamsPlayers ( + player_id INT, + team_id INT, + joined_date DATE, + jersey_num INT CHECK (jersey_num BETWEEN 0 AND 99), + left_date DATE, + status VARCHAR(50) DEFAULT 'active', + PRIMARY KEY (player_id, team_id, joined_date), + CONSTRAINT FK_TeamsPlayers_Players FOREIGN KEY (player_id) + REFERENCES Players(player_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_TeamsPlayers_Teams FOREIGN KEY (team_id) + REFERENCES Teams(team_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT CHK_Dates CHECK (left_date IS NULL OR left_date >= joined_date) +); + +-- Fixed Game table with all required 
columns +CREATE TABLE IF NOT EXISTS Game ( + game_id INT PRIMARY KEY AUTO_INCREMENT, + game_date DATE NOT NULL, + game_time TIME, + home_team_id INT NOT NULL, + away_team_id INT NOT NULL, + home_score INT DEFAULT 0, + away_score INT DEFAULT 0, + season VARCHAR(20) NOT NULL, + game_type ENUM('regular', 'playoff') DEFAULT 'regular', + status ENUM('scheduled', 'in_progress', 'completed') DEFAULT 'scheduled', + attendance INT, + venue VARCHAR(100), + CONSTRAINT FK_Game_HomeTeam FOREIGN KEY (home_team_id) + REFERENCES Teams(team_id) ON UPDATE CASCADE ON DELETE RESTRICT, + CONSTRAINT FK_Game_AwayTeam FOREIGN KEY (away_team_id) + REFERENCES Teams(team_id) ON UPDATE CASCADE ON DELETE RESTRICT +); + +-- Fixed PlayerGameStats with all required columns +CREATE TABLE IF NOT EXISTS PlayerGameStats ( + player_id INT NOT NULL, + game_id INT NOT NULL, + points INT DEFAULT 0, + rebounds INT DEFAULT 0, + assists INT DEFAULT 0, + steals INT DEFAULT 0, + blocks INT DEFAULT 0, + turnovers INT DEFAULT 0, + shooting_percentage DECIMAL(5,3), + three_point_percentage DECIMAL(5,3), + free_throw_percentage DECIMAL(5,3), + plus_minus INT DEFAULT 0, + minutes_played INT DEFAULT 0, + PRIMARY KEY (player_id, game_id), + CONSTRAINT FK_PlayerGameStats_Player FOREIGN KEY (player_id) + REFERENCES Players(player_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_PlayerGameStats_Game FOREIGN KEY (game_id) + REFERENCES Game(game_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS PlayerMatchup ( + game_id INT, + offensive_player_id INT, + defensive_player_id INT, + offensive_rating DECIMAL(5,2), + defensive_rating DECIMAL(5,2), + possessions INT, + points_scored INT, + shooting_percentage DECIMAL(5,3), + PRIMARY KEY (game_id, offensive_player_id, defensive_player_id), + CONSTRAINT FK_PlayerMatchup_Game FOREIGN KEY (game_id) + REFERENCES Game(game_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_PlayerMatchup_OffensivePlayer FOREIGN KEY (offensive_player_id) + 
REFERENCES Players(player_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_PlayerMatchup_DefensivePlayer FOREIGN KEY (defensive_player_id) + REFERENCES Players(player_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS LineupConfiguration ( + lineup_id INT PRIMARY KEY AUTO_INCREMENT, + team_id INT NOT NULL, + quarter INT CHECK (quarter BETWEEN 1 AND 4), + time_on TIME, + time_off TIME, + plus_minus INT, + offensive_rating DECIMAL(5,2), + defensive_rating DECIMAL(5,2), + CONSTRAINT FK_LineupConfiguration_Teams FOREIGN KEY (team_id) + REFERENCES Teams(team_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS PlayerLineups ( + player_id INT, + lineup_id INT, + position_in_lineup VARCHAR(50), + PRIMARY KEY (player_id, lineup_id), + CONSTRAINT FK_PlayerLineups_Players FOREIGN KEY (player_id) + REFERENCES Players(player_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_PlayerLineups_LineupConfiguration FOREIGN KEY (lineup_id) + REFERENCES LineupConfiguration(lineup_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +-- Fixed table name: DraftEvaluations (not DraftEval) +CREATE TABLE IF NOT EXISTS DraftEvaluations ( + evaluation_id INT PRIMARY KEY AUTO_INCREMENT, + player_id INT NOT NULL, + overall_rating DECIMAL(5,2) CHECK (overall_rating BETWEEN 0 AND 100), + offensive_rating DECIMAL(5,2), + defensive_rating DECIMAL(5,2), + athleticism_rating DECIMAL(5,2), + potential_rating DECIMAL(5,2), + evaluation_type ENUM('prospect', 'free_agent', 'trade_target') DEFAULT 'prospect', + strengths TEXT, + weaknesses TEXT, + scout_notes TEXT, + projected_round INT, + comparison_player VARCHAR(100), + last_updated DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + UNIQUE KEY unique_player_evaluation (player_id), + CONSTRAINT FK_DraftEvaluations_Players FOREIGN KEY (player_id) + REFERENCES Players(player_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS KeyMatchups ( + matchup_id INT PRIMARY KEY 
AUTO_INCREMENT, + matchup_text TEXT NOT NULL +); + +-- Fixed table name: GamePlans (not GamePlan) +CREATE TABLE IF NOT EXISTS GamePlans ( + plan_id INT PRIMARY KEY AUTO_INCREMENT, + team_id INT NOT NULL, + opponent_id INT, + game_id INT, + plan_name VARCHAR(200) NOT NULL, + offensive_strategy TEXT, + defensive_strategy TEXT, + key_matchups TEXT, + special_instructions TEXT, + status ENUM('draft', 'active', 'archived') DEFAULT 'draft', + created_date DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_date DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + CONSTRAINT FK_GamePlans_Team FOREIGN KEY (team_id) + REFERENCES Teams(team_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_GamePlans_Opponent FOREIGN KEY (opponent_id) + REFERENCES Teams(team_id) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT FK_GamePlans_Game FOREIGN KEY (game_id) + REFERENCES Game(game_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS SystemHealth ( + health_id INT PRIMARY KEY AUTO_INCREMENT, + check_time DATETIME DEFAULT CURRENT_TIMESTAMP, + service_name VARCHAR(100) NOT NULL, + error_rate_pct DECIMAL(5,2), + avg_response_time DECIMAL(10,2), + status ENUM('Healthy', 'Warning', 'Error', 'Critical') DEFAULT 'Healthy' +); + +CREATE TABLE IF NOT EXISTS ErrorLogs ( + error_id INT PRIMARY KEY AUTO_INCREMENT, + error_type VARCHAR(100), + severity ENUM('info', 'warning', 'error', 'critical') NOT NULL, + module VARCHAR(100), + error_message TEXT, + stack_trace TEXT, + user_id INT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + resolved_at DATETIME, + resolved_by VARCHAR(100), + resolution_notes TEXT, + CONSTRAINT FK_ErrorLogs_Users FOREIGN KEY (user_id) + REFERENCES Users(user_id) ON UPDATE CASCADE ON DELETE SET NULL +); + +CREATE TABLE IF NOT EXISTS DataLoads ( + load_id INT PRIMARY KEY AUTO_INCREMENT, + load_type VARCHAR(100), + status ENUM('pending', 'running', 'completed', 'failed') DEFAULT 'pending', + started_at DATETIME DEFAULT CURRENT_TIMESTAMP, + 
completed_at DATETIME, + records_processed INT DEFAULT 0, + records_failed INT DEFAULT 0, + error_message TEXT, + initiated_by VARCHAR(100), + source_file VARCHAR(255) +); + +CREATE TABLE IF NOT EXISTS DataErrors ( + data_error_id INT PRIMARY KEY AUTO_INCREMENT, + error_type ENUM('duplicate', 'missing', 'invalid') NOT NULL, + table_name VARCHAR(100) NOT NULL, + record_id VARCHAR(100), + field_name VARCHAR(100), + invalid_value TEXT, + expected_format VARCHAR(255), + detected_at DATETIME DEFAULT CURRENT_TIMESTAMP, + resolved_at DATETIME, + auto_fixed BOOLEAN DEFAULT FALSE +); + +-- Missing tables needed by admin routes +CREATE TABLE IF NOT EXISTS CleanupSchedule ( + schedule_id INT PRIMARY KEY AUTO_INCREMENT, + cleanup_type VARCHAR(100) NOT NULL, + frequency ENUM('daily', 'weekly', 'monthly') NOT NULL, + next_run DATETIME, + last_run DATETIME, + retention_days INT NOT NULL, + is_active BOOLEAN DEFAULT TRUE, + created_by VARCHAR(100), + created_at DATETIME DEFAULT CURRENT_TIMESTAMP +); + +CREATE TABLE IF NOT EXISTS CleanupHistory ( + history_id INT PRIMARY KEY AUTO_INCREMENT, + schedule_id INT, + cleanup_type VARCHAR(100), + started_at DATETIME, + completed_at DATETIME, + records_deleted INT, + status ENUM('started', 'completed', 'failed') DEFAULT 'started', + error_message TEXT, + CONSTRAINT FK_CleanupHistory_Schedule FOREIGN KEY (schedule_id) + REFERENCES CleanupSchedule(schedule_id) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS ValidationReports ( + validation_id INT PRIMARY KEY AUTO_INCREMENT, + validation_type VARCHAR(100) NOT NULL, + table_name VARCHAR(100) NOT NULL, + status ENUM('passed', 'failed', 'warning') NOT NULL, + total_records INT, + valid_records INT, + invalid_records INT, + validation_rules JSON, + error_details TEXT, + run_date DATETIME DEFAULT CURRENT_TIMESTAMP, + run_by VARCHAR(100) +); diff --git a/database-files/classicModels.sql b/database-files/classicModels.sql deleted file mode 100644 index 0b26e399ef..0000000000 
--- a/database-files/classicModels.sql +++ /dev/null @@ -1,7933 +0,0 @@ -/* -********************************************************************* -http://www.mysqltutorial.org -********************************************************************* -Name: MySQL Sample Database classicmodels -Link: http://www.mysqltutorial.org/mysql-sample-database.aspx -Version 3.1 -+ changed data type from DOUBLE to DECIMAL for amount columns -Version 3.0 -+ changed DATETIME to DATE for some colunmns -Version 2.0 -+ changed table type from MyISAM to InnoDB -+ added foreign keys for all tables -********************************************************************* -*/ - - -/*!40101 SET NAMES utf8 */; - -/*!40101 SET SQL_MODE=''*/; - -/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; -/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; -/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; -/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -CREATE DATABASE /*!32312 IF NOT EXISTS*/`classicmodels` /*!40100 DEFAULT CHARACTER SET latin1 */; - -USE `classicmodels`; - -flush privileges; - -/*Table structure for table `customers` */ - -DROP TABLE IF EXISTS `customers`; - -CREATE TABLE `customers` ( - `customerNumber` int(11) NOT NULL, - `customerName` varchar(50) NOT NULL, - `contactLastName` varchar(50) NOT NULL, - `contactFirstName` varchar(50) NOT NULL, - `phone` varchar(50) NOT NULL, - `addressLine1` varchar(50) NOT NULL, - `addressLine2` varchar(50) DEFAULT NULL, - `city` varchar(50) NOT NULL, - `state` varchar(50) DEFAULT NULL, - `postalCode` varchar(15) DEFAULT NULL, - `country` varchar(50) NOT NULL, - `salesRepEmployeeNumber` int(11) DEFAULT NULL, - `creditLimit` decimal(10,2) DEFAULT NULL, - PRIMARY KEY (`customerNumber`), - KEY `salesRepEmployeeNumber` (`salesRepEmployeeNumber`), - CONSTRAINT `customers_ibfk_1` FOREIGN KEY (`salesRepEmployeeNumber`) REFERENCES `employees` (`employeeNumber`) -) 
ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `customers` */ - -insert into `customers`(`customerNumber`,`customerName`,`contactLastName`,`contactFirstName`,`phone`,`addressLine1`,`addressLine2`,`city`,`state`,`postalCode`,`country`,`salesRepEmployeeNumber`,`creditLimit`) values - -(103,'Atelier graphique','Schmitt','Carine ','40.32.2555','54, rue Royale',NULL,'Nantes',NULL,'44000','France',1370,'21000.00'), - -(112,'Signal Gift Stores','King','Jean','7025551838','8489 Strong St.',NULL,'Las Vegas','NV','83030','USA',1166,'71800.00'), - -(114,'Australian Collectors, Co.','Ferguson','Peter','03 9520 4555','636 St Kilda Road','Level 3','Melbourne','Victoria','3004','Australia',1611,'117300.00'), - -(119,'La Rochelle Gifts','Labrune','Janine ','40.67.8555','67, rue des Cinquante Otages',NULL,'Nantes',NULL,'44000','France',1370,'118200.00'), - -(121,'Baane Mini Imports','Bergulfsen','Jonas ','07-98 9555','Erling Skakkes gate 78',NULL,'Stavern',NULL,'4110','Norway',1504,'81700.00'), - -(124,'Mini Gifts Distributors Ltd.','Nelson','Susan','4155551450','5677 Strong St.',NULL,'San Rafael','CA','97562','USA',1165,'210500.00'), - -(125,'Havel & Zbyszek Co','Piestrzeniewicz','Zbyszek ','(26) 642-7555','ul. Filtrowa 68',NULL,'Warszawa',NULL,'01-012','Poland',NULL,'0.00'), - -(128,'Blauer See Auto, Co.','Keitel','Roland','+49 69 66 90 2555','Lyonerstr. 
34',NULL,'Frankfurt',NULL,'60528','Germany',1504,'59700.00'), - -(129,'Mini Wheels Co.','Murphy','Julie','6505555787','5557 North Pendale Street',NULL,'San Francisco','CA','94217','USA',1165,'64600.00'), - -(131,'Land of Toys Inc.','Lee','Kwai','2125557818','897 Long Airport Avenue',NULL,'NYC','NY','10022','USA',1323,'114900.00'), - -(141,'Euro+ Shopping Channel','Freyre','Diego ','(91) 555 94 44','C/ Moralzarzal, 86',NULL,'Madrid',NULL,'28034','Spain',1370,'227600.00'), - -(144,'Volvo Model Replicas, Co','Berglund','Christina ','0921-12 3555','Berguvsvägen 8',NULL,'Luleå',NULL,'S-958 22','Sweden',1504,'53100.00'), - -(145,'Danish Wholesale Imports','Petersen','Jytte ','31 12 3555','Vinbæltet 34',NULL,'Kobenhavn',NULL,'1734','Denmark',1401,'83400.00'), - -(146,'Saveley & Henriot, Co.','Saveley','Mary ','78.32.5555','2, rue du Commerce',NULL,'Lyon',NULL,'69004','France',1337,'123900.00'), - -(148,'Dragon Souveniers, Ltd.','Natividad','Eric','+65 221 7555','Bronz Sok.','Bronz Apt. 3/6 Tesvikiye','Singapore',NULL,'079903','Singapore',1621,'103800.00'), - -(151,'Muscle Machine Inc','Young','Jeff','2125557413','4092 Furth Circle','Suite 400','NYC','NY','10022','USA',1286,'138500.00'), - -(157,'Diecast Classics Inc.','Leong','Kelvin','2155551555','7586 Pompton St.',NULL,'Allentown','PA','70267','USA',1216,'100600.00'), - -(161,'Technics Stores Inc.','Hashimoto','Juri','6505556809','9408 Furth Circle',NULL,'Burlingame','CA','94217','USA',1165,'84600.00'), - -(166,'Handji Gifts& Co','Victorino','Wendy','+65 224 1555','106 Linden Road Sandown','2nd Floor','Singapore',NULL,'069045','Singapore',1612,'97900.00'), - -(167,'Herkku Gifts','Oeztan','Veysel','+47 2267 3215','Brehmen St. 121','PR 334 Sentrum','Bergen',NULL,'N 5804','Norway ',1504,'96800.00'), - -(168,'American Souvenirs Inc','Franco','Keith','2035557845','149 Spinnaker Dr.','Suite 101','New Haven','CT','97823','USA',1286,'0.00'), - -(169,'Porto Imports Co.','de Castro','Isabel ','(1) 356-5555','Estrada da saúde n. 
58',NULL,'Lisboa',NULL,'1756','Portugal',NULL,'0.00'), - -(171,'Daedalus Designs Imports','Rancé','Martine ','20.16.1555','184, chaussée de Tournai',NULL,'Lille',NULL,'59000','France',1370,'82900.00'), - -(172,'La Corne D\'abondance, Co.','Bertrand','Marie','(1) 42.34.2555','265, boulevard Charonne',NULL,'Paris',NULL,'75012','France',1337,'84300.00'), - -(173,'Cambridge Collectables Co.','Tseng','Jerry','6175555555','4658 Baden Av.',NULL,'Cambridge','MA','51247','USA',1188,'43400.00'), - -(175,'Gift Depot Inc.','King','Julie','2035552570','25593 South Bay Ln.',NULL,'Bridgewater','CT','97562','USA',1323,'84300.00'), - -(177,'Osaka Souveniers Co.','Kentary','Mory','+81 06 6342 5555','1-6-20 Dojima',NULL,'Kita-ku','Osaka',' 530-0003','Japan',1621,'81200.00'), - -(181,'Vitachrome Inc.','Frick','Michael','2125551500','2678 Kingston Rd.','Suite 101','NYC','NY','10022','USA',1286,'76400.00'), - -(186,'Toys of Finland, Co.','Karttunen','Matti','90-224 8555','Keskuskatu 45',NULL,'Helsinki',NULL,'21240','Finland',1501,'96500.00'), - -(187,'AV Stores, Co.','Ashworth','Rachel','(171) 555-1555','Fauntleroy Circus',NULL,'Manchester',NULL,'EC2 5NT','UK',1501,'136800.00'), - -(189,'Clover Collections, Co.','Cassidy','Dean','+353 1862 1555','25 Maiden Lane','Floor No. 
4','Dublin',NULL,'2','Ireland',1504,'69400.00'), - -(198,'Auto-Moto Classics Inc.','Taylor','Leslie','6175558428','16780 Pompton St.',NULL,'Brickhaven','MA','58339','USA',1216,'23000.00'), - -(201,'UK Collectables, Ltd.','Devon','Elizabeth','(171) 555-2282','12, Berkeley Gardens Blvd',NULL,'Liverpool',NULL,'WX1 6LT','UK',1501,'92700.00'), - -(202,'Canadian Gift Exchange Network','Tamuri','Yoshi ','(604) 555-3392','1900 Oak St.',NULL,'Vancouver','BC','V3F 2K1','Canada',1323,'90300.00'), - -(204,'Online Mini Collectables','Barajas','Miguel','6175557555','7635 Spinnaker Dr.',NULL,'Brickhaven','MA','58339','USA',1188,'68700.00'), - -(205,'Toys4GrownUps.com','Young','Julie','6265557265','78934 Hillside Dr.',NULL,'Pasadena','CA','90003','USA',1166,'90700.00'), - -(206,'Asian Shopping Network, Co','Walker','Brydey','+612 9411 1555','Suntec Tower Three','8 Temasek','Singapore',NULL,'038988','Singapore',NULL,'0.00'), - -(209,'Mini Caravy','Citeaux','Frédérique ','88.60.1555','24, place Kléber',NULL,'Strasbourg',NULL,'67000','France',1370,'53800.00'), - -(211,'King Kong Collectables, Co.','Gao','Mike','+852 2251 1555','Bank of China Tower','1 Garden Road','Central Hong Kong',NULL,NULL,'Hong Kong',1621,'58600.00'), - -(216,'Enaco Distributors','Saavedra','Eduardo ','(93) 203 4555','Rambla de Cataluña, 23',NULL,'Barcelona',NULL,'08022','Spain',1702,'60300.00'), - -(219,'Boards & Toys Co.','Young','Mary','3105552373','4097 Douglas Av.',NULL,'Glendale','CA','92561','USA',1166,'11000.00'), - -(223,'Natürlich Autos','Kloss','Horst ','0372-555188','Taucherstraße 10',NULL,'Cunewalde',NULL,'01307','Germany',NULL,'0.00'), - -(227,'Heintze Collectables','Ibsen','Palle','86 21 3555','Smagsloget 45',NULL,'Århus',NULL,'8200','Denmark',1401,'120800.00'), - -(233,'Québec Home Shopping Network','Fresnière','Jean ','(514) 555-8054','43 rue St. 
Laurent',NULL,'Montréal','Québec','H1J 1C3','Canada',1286,'48700.00'), - -(237,'ANG Resellers','Camino','Alejandra ','(91) 745 6555','Gran Vía, 1',NULL,'Madrid',NULL,'28001','Spain',NULL,'0.00'), - -(239,'Collectable Mini Designs Co.','Thompson','Valarie','7605558146','361 Furth Circle',NULL,'San Diego','CA','91217','USA',1166,'105000.00'), - -(240,'giftsbymail.co.uk','Bennett','Helen ','(198) 555-8888','Garden House','Crowther Way 23','Cowes','Isle of Wight','PO31 7PJ','UK',1501,'93900.00'), - -(242,'Alpha Cognac','Roulet','Annette ','61.77.6555','1 rue Alsace-Lorraine',NULL,'Toulouse',NULL,'31000','France',1370,'61100.00'), - -(247,'Messner Shopping Network','Messner','Renate ','069-0555984','Magazinweg 7',NULL,'Frankfurt',NULL,'60528','Germany',NULL,'0.00'), - -(249,'Amica Models & Co.','Accorti','Paolo ','011-4988555','Via Monte Bianco 34',NULL,'Torino',NULL,'10100','Italy',1401,'113000.00'), - -(250,'Lyon Souveniers','Da Silva','Daniel','+33 1 46 62 7555','27 rue du Colonel Pierre Avia',NULL,'Paris',NULL,'75508','France',1337,'68100.00'), - -(256,'Auto Associés & Cie.','Tonini','Daniel ','30.59.8555','67, avenue de l\'Europe',NULL,'Versailles',NULL,'78000','France',1370,'77900.00'), - -(259,'Toms Spezialitäten, Ltd','Pfalzheim','Henriette ','0221-5554327','Mehrheimerstr. 
369',NULL,'Köln',NULL,'50739','Germany',1504,'120400.00'), - -(260,'Royal Canadian Collectables, Ltd.','Lincoln','Elizabeth ','(604) 555-4555','23 Tsawassen Blvd.',NULL,'Tsawassen','BC','T2F 8M4','Canada',1323,'89600.00'), - -(273,'Franken Gifts, Co','Franken','Peter ','089-0877555','Berliner Platz 43',NULL,'München',NULL,'80805','Germany',NULL,'0.00'), - -(276,'Anna\'s Decorations, Ltd','O\'Hara','Anna','02 9936 8555','201 Miller Street','Level 15','North Sydney','NSW','2060','Australia',1611,'107800.00'), - -(278,'Rovelli Gifts','Rovelli','Giovanni ','035-640555','Via Ludovico il Moro 22',NULL,'Bergamo',NULL,'24100','Italy',1401,'119600.00'), - -(282,'Souveniers And Things Co.','Huxley','Adrian','+61 2 9495 8555','Monitor Money Building','815 Pacific Hwy','Chatswood','NSW','2067','Australia',1611,'93300.00'), - -(286,'Marta\'s Replicas Co.','Hernandez','Marta','6175558555','39323 Spinnaker Dr.',NULL,'Cambridge','MA','51247','USA',1216,'123700.00'), - -(293,'BG&E Collectables','Harrison','Ed','+41 26 425 50 01','Rte des Arsenaux 41 ',NULL,'Fribourg',NULL,'1700','Switzerland',NULL,'0.00'), - -(298,'Vida Sport, Ltd','Holz','Mihael','0897-034555','Grenzacherweg 237',NULL,'Genève',NULL,'1203','Switzerland',1702,'141300.00'), - -(299,'Norway Gifts By Mail, Co.','Klaeboe','Jan','+47 2212 1555','Drammensveien 126A','PB 211 Sentrum','Oslo',NULL,'N 0106','Norway ',1504,'95100.00'), - -(303,'Schuyler Imports','Schuyler','Bradley','+31 20 491 9555','Kingsfordweg 151',NULL,'Amsterdam',NULL,'1043 GR','Netherlands',NULL,'0.00'), - -(307,'Der Hund Imports','Andersen','Mel','030-0074555','Obere Str. 
57',NULL,'Berlin',NULL,'12209','Germany',NULL,'0.00'), - -(311,'Oulu Toy Supplies, Inc.','Koskitalo','Pirkko','981-443655','Torikatu 38',NULL,'Oulu',NULL,'90110','Finland',1501,'90500.00'), - -(314,'Petit Auto','Dewey','Catherine ','(02) 5554 67','Rue Joseph-Bens 532',NULL,'Bruxelles',NULL,'B-1180','Belgium',1401,'79900.00'), - -(319,'Mini Classics','Frick','Steve','9145554562','3758 North Pendale Street',NULL,'White Plains','NY','24067','USA',1323,'102700.00'), - -(320,'Mini Creations Ltd.','Huang','Wing','5085559555','4575 Hillside Dr.',NULL,'New Bedford','MA','50553','USA',1188,'94500.00'), - -(321,'Corporate Gift Ideas Co.','Brown','Julie','6505551386','7734 Strong St.',NULL,'San Francisco','CA','94217','USA',1165,'105000.00'), - -(323,'Down Under Souveniers, Inc','Graham','Mike','+64 9 312 5555','162-164 Grafton Road','Level 2','Auckland ',NULL,NULL,'New Zealand',1612,'88000.00'), - -(324,'Stylish Desk Decors, Co.','Brown','Ann ','(171) 555-0297','35 King George',NULL,'London',NULL,'WX3 6FW','UK',1501,'77000.00'), - -(328,'Tekni Collectables Inc.','Brown','William','2015559350','7476 Moss Rd.',NULL,'Newark','NJ','94019','USA',1323,'43000.00'), - -(333,'Australian Gift Network, Co','Calaghan','Ben','61-7-3844-6555','31 Duncan St. West End',NULL,'South Brisbane','Queensland','4101','Australia',1611,'51600.00'), - -(334,'Suominen Souveniers','Suominen','Kalle','+358 9 8045 555','Software Engineering Center','SEC Oy','Espoo',NULL,'FIN-02271','Finland',1501,'98800.00'), - -(335,'Cramer Spezialitäten, Ltd','Cramer','Philip ','0555-09555','Maubelstr. 
90',NULL,'Brandenburg',NULL,'14776','Germany',NULL,'0.00'), - -(339,'Classic Gift Ideas, Inc','Cervantes','Francisca','2155554695','782 First Street',NULL,'Philadelphia','PA','71270','USA',1188,'81100.00'), - -(344,'CAF Imports','Fernandez','Jesus','+34 913 728 555','Merchants House','27-30 Merchant\'s Quay','Madrid',NULL,'28023','Spain',1702,'59600.00'), - -(347,'Men \'R\' US Retailers, Ltd.','Chandler','Brian','2155554369','6047 Douglas Av.',NULL,'Los Angeles','CA','91003','USA',1166,'57700.00'), - -(348,'Asian Treasures, Inc.','McKenna','Patricia ','2967 555','8 Johnstown Road',NULL,'Cork','Co. Cork',NULL,'Ireland',NULL,'0.00'), - -(350,'Marseille Mini Autos','Lebihan','Laurence ','91.24.4555','12, rue des Bouchers',NULL,'Marseille',NULL,'13008','France',1337,'65000.00'), - -(353,'Reims Collectables','Henriot','Paul ','26.47.1555','59 rue de l\'Abbaye',NULL,'Reims',NULL,'51100','France',1337,'81100.00'), - -(356,'SAR Distributors, Co','Kuger','Armand','+27 21 550 3555','1250 Pretorius Street',NULL,'Hatfield','Pretoria','0028','South Africa',NULL,'0.00'), - -(357,'GiftsForHim.com','MacKinlay','Wales','64-9-3763555','199 Great North Road',NULL,'Auckland',NULL,NULL,'New Zealand',1612,'77700.00'), - -(361,'Kommission Auto','Josephs','Karin','0251-555259','Luisenstr. 48',NULL,'Münster',NULL,'44087','Germany',NULL,'0.00'), - -(362,'Gifts4AllAges.com','Yoshido','Juri','6175559555','8616 Spinnaker Dr.',NULL,'Boston','MA','51003','USA',1216,'41900.00'), - -(363,'Online Diecast Creations Co.','Young','Dorothy','6035558647','2304 Long Airport Avenue',NULL,'Nashua','NH','62005','USA',1216,'114200.00'), - -(369,'Lisboa Souveniers, Inc','Rodriguez','Lino ','(1) 354-2555','Jardim das rosas n. 32',NULL,'Lisboa',NULL,'1675','Portugal',NULL,'0.00'), - -(376,'Precious Collectables','Urs','Braun','0452-076555','Hauptstr. 
29',NULL,'Bern',NULL,'3012','Switzerland',1702,'0.00'), - -(379,'Collectables For Less Inc.','Nelson','Allen','6175558555','7825 Douglas Av.',NULL,'Brickhaven','MA','58339','USA',1188,'70700.00'), - -(381,'Royale Belge','Cartrain','Pascale ','(071) 23 67 2555','Boulevard Tirou, 255',NULL,'Charleroi',NULL,'B-6000','Belgium',1401,'23500.00'), - -(382,'Salzburg Collectables','Pipps','Georg ','6562-9555','Geislweg 14',NULL,'Salzburg',NULL,'5020','Austria',1401,'71700.00'), - -(385,'Cruz & Sons Co.','Cruz','Arnold','+63 2 555 3587','15 McCallum Street','NatWest Center #13-03','Makati City',NULL,'1227 MM','Philippines',1621,'81500.00'), - -(386,'L\'ordine Souveniers','Moroni','Maurizio ','0522-556555','Strada Provinciale 124',NULL,'Reggio Emilia',NULL,'42100','Italy',1401,'121400.00'), - -(398,'Tokyo Collectables, Ltd','Shimamura','Akiko','+81 3 3584 0555','2-2-8 Roppongi',NULL,'Minato-ku','Tokyo','106-0032','Japan',1621,'94400.00'), - -(406,'Auto Canal+ Petit','Perrier','Dominique','(1) 47.55.6555','25, rue Lauriston',NULL,'Paris',NULL,'75016','France',1337,'95000.00'), - -(409,'Stuttgart Collectable Exchange','Müller','Rita ','0711-555361','Adenauerallee 900',NULL,'Stuttgart',NULL,'70563','Germany',NULL,'0.00'), - -(412,'Extreme Desk Decorations, Ltd','McRoy','Sarah','04 499 9555','101 Lambton Quay','Level 11','Wellington',NULL,NULL,'New Zealand',1612,'86800.00'), - -(415,'Bavarian Collectables Imports, Co.','Donnermeyer','Michael',' +49 89 61 08 9555','Hansastr. 15',NULL,'Munich',NULL,'80686','Germany',1504,'77000.00'), - -(424,'Classic Legends Inc.','Hernandez','Maria','2125558493','5905 Pompton St.','Suite 750','NYC','NY','10022','USA',1286,'67500.00'), - -(443,'Feuer Online Stores, Inc','Feuer','Alexander ','0342-555176','Heerstr. 
22',NULL,'Leipzig',NULL,'04179','Germany',NULL,'0.00'), - -(447,'Gift Ideas Corp.','Lewis','Dan','2035554407','2440 Pompton St.',NULL,'Glendale','CT','97561','USA',1323,'49700.00'), - -(448,'Scandinavian Gift Ideas','Larsson','Martha','0695-34 6555','Åkergatan 24',NULL,'Bräcke',NULL,'S-844 67','Sweden',1504,'116400.00'), - -(450,'The Sharp Gifts Warehouse','Frick','Sue','4085553659','3086 Ingle Ln.',NULL,'San Jose','CA','94217','USA',1165,'77600.00'), - -(452,'Mini Auto Werke','Mendel','Roland ','7675-3555','Kirchgasse 6',NULL,'Graz',NULL,'8010','Austria',1401,'45300.00'), - -(455,'Super Scale Inc.','Murphy','Leslie','2035559545','567 North Pendale Street',NULL,'New Haven','CT','97823','USA',1286,'95400.00'), - -(456,'Microscale Inc.','Choi','Yu','2125551957','5290 North Pendale Street','Suite 200','NYC','NY','10022','USA',1286,'39800.00'), - -(458,'Corrida Auto Replicas, Ltd','Sommer','Martín ','(91) 555 22 82','C/ Araquil, 67',NULL,'Madrid',NULL,'28023','Spain',1702,'104600.00'), - -(459,'Warburg Exchange','Ottlieb','Sven ','0241-039123','Walserweg 21',NULL,'Aachen',NULL,'52066','Germany',NULL,'0.00'), - -(462,'FunGiftIdeas.com','Benitez','Violeta','5085552555','1785 First Street',NULL,'New Bedford','MA','50553','USA',1216,'85800.00'), - -(465,'Anton Designs, Ltd.','Anton','Carmen','+34 913 728555','c/ Gobelas, 19-1 Urb. La Florida',NULL,'Madrid',NULL,'28023','Spain',NULL,'0.00'), - -(471,'Australian Collectables, Ltd','Clenahan','Sean','61-9-3844-6555','7 Allen Street',NULL,'Glen Waverly','Victoria','3150','Australia',1611,'60300.00'), - -(473,'Frau da Collezione','Ricotti','Franco','+39 022515555','20093 Cologno Monzese','Alessandro Volta 16','Milan',NULL,NULL,'Italy',1401,'34800.00'), - -(475,'West Coast Collectables Co.','Thompson','Steve','3105553722','3675 Furth Circle',NULL,'Burbank','CA','94019','USA',1166,'55400.00'), - -(477,'Mit Vergnügen & Co.','Moos','Hanna ','0621-08555','Forsterstr. 
57',NULL,'Mannheim',NULL,'68306','Germany',NULL,'0.00'), - -(480,'Kremlin Collectables, Co.','Semenov','Alexander ','+7 812 293 0521','2 Pobedy Square',NULL,'Saint Petersburg',NULL,'196143','Russia',NULL,'0.00'), - -(481,'Raanan Stores, Inc','Altagar,G M','Raanan','+ 972 9 959 8555','3 Hagalim Blv.',NULL,'Herzlia',NULL,'47625','Israel',NULL,'0.00'), - -(484,'Iberia Gift Imports, Corp.','Roel','José Pedro ','(95) 555 82 82','C/ Romero, 33',NULL,'Sevilla',NULL,'41101','Spain',1702,'65700.00'), - -(486,'Motor Mint Distributors Inc.','Salazar','Rosa','2155559857','11328 Douglas Av.',NULL,'Philadelphia','PA','71270','USA',1323,'72600.00'), - -(487,'Signal Collectibles Ltd.','Taylor','Sue','4155554312','2793 Furth Circle',NULL,'Brisbane','CA','94217','USA',1165,'60300.00'), - -(489,'Double Decker Gift Stores, Ltd','Smith','Thomas ','(171) 555-7555','120 Hanover Sq.',NULL,'London',NULL,'WA1 1DP','UK',1501,'43300.00'), - -(495,'Diecast Collectables','Franco','Valarie','6175552555','6251 Ingle Ln.',NULL,'Boston','MA','51003','USA',1188,'85100.00'), - -(496,'Kelly\'s Gift Shop','Snowden','Tony','+64 9 5555500','Arenales 1938 3\'A\'',NULL,'Auckland ',NULL,NULL,'New Zealand',1612,'110000.00'); - -/*Table structure for table `employees` */ - -DROP TABLE IF EXISTS `employees`; - -CREATE TABLE `employees` ( - `employeeNumber` int(11) NOT NULL, - `lastName` varchar(50) NOT NULL, - `firstName` varchar(50) NOT NULL, - `extension` varchar(10) NOT NULL, - `email` varchar(100) NOT NULL, - `officeCode` varchar(10) NOT NULL, - `reportsTo` int(11) DEFAULT NULL, - `jobTitle` varchar(50) NOT NULL, - PRIMARY KEY (`employeeNumber`), - KEY `reportsTo` (`reportsTo`), - KEY `officeCode` (`officeCode`), - CONSTRAINT `employees_ibfk_1` FOREIGN KEY (`reportsTo`) REFERENCES `employees` (`employeeNumber`), - CONSTRAINT `employees_ibfk_2` FOREIGN KEY (`officeCode`) REFERENCES `offices` (`officeCode`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `employees` */ - -insert into 
`employees`(`employeeNumber`,`lastName`,`firstName`,`extension`,`email`,`officeCode`,`reportsTo`,`jobTitle`) values - -(1002,'Murphy','Diane','x5800','dmurphy@classicmodelcars.com','1',NULL,'President'), - -(1056,'Patterson','Mary','x4611','mpatterso@classicmodelcars.com','1',1002,'VP Sales'), - -(1076,'Firrelli','Jeff','x9273','jfirrelli@classicmodelcars.com','1',1002,'VP Marketing'), - -(1088,'Patterson','William','x4871','wpatterson@classicmodelcars.com','6',1056,'Sales Manager (APAC)'), - -(1102,'Bondur','Gerard','x5408','gbondur@classicmodelcars.com','4',1056,'Sale Manager (EMEA)'), - -(1143,'Bow','Anthony','x5428','abow@classicmodelcars.com','1',1056,'Sales Manager (NA)'), - -(1165,'Jennings','Leslie','x3291','ljennings@classicmodelcars.com','1',1143,'Sales Rep'), - -(1166,'Thompson','Leslie','x4065','lthompson@classicmodelcars.com','1',1143,'Sales Rep'), - -(1188,'Firrelli','Julie','x2173','jfirrelli@classicmodelcars.com','2',1143,'Sales Rep'), - -(1216,'Patterson','Steve','x4334','spatterson@classicmodelcars.com','2',1143,'Sales Rep'), - -(1286,'Tseng','Foon Yue','x2248','ftseng@classicmodelcars.com','3',1143,'Sales Rep'), - -(1323,'Vanauf','George','x4102','gvanauf@classicmodelcars.com','3',1143,'Sales Rep'), - -(1337,'Bondur','Loui','x6493','lbondur@classicmodelcars.com','4',1102,'Sales Rep'), - -(1370,'Hernandez','Gerard','x2028','ghernande@classicmodelcars.com','4',1102,'Sales Rep'), - -(1401,'Castillo','Pamela','x2759','pcastillo@classicmodelcars.com','4',1102,'Sales Rep'), - -(1501,'Bott','Larry','x2311','lbott@classicmodelcars.com','7',1102,'Sales Rep'), - -(1504,'Jones','Barry','x102','bjones@classicmodelcars.com','7',1102,'Sales Rep'), - -(1611,'Fixter','Andy','x101','afixter@classicmodelcars.com','6',1088,'Sales Rep'), - -(1612,'Marsh','Peter','x102','pmarsh@classicmodelcars.com','6',1088,'Sales Rep'), - -(1619,'King','Tom','x103','tking@classicmodelcars.com','6',1088,'Sales Rep'), - 
-(1621,'Nishi','Mami','x101','mnishi@classicmodelcars.com','5',1056,'Sales Rep'), - -(1625,'Kato','Yoshimi','x102','ykato@classicmodelcars.com','5',1621,'Sales Rep'), - -(1702,'Gerard','Martin','x2312','mgerard@classicmodelcars.com','4',1102,'Sales Rep'); - -/*Table structure for table `offices` */ - -DROP TABLE IF EXISTS `offices`; - -CREATE TABLE `offices` ( - `officeCode` varchar(10) NOT NULL, - `city` varchar(50) NOT NULL, - `phone` varchar(50) NOT NULL, - `addressLine1` varchar(50) NOT NULL, - `addressLine2` varchar(50) DEFAULT NULL, - `state` varchar(50) DEFAULT NULL, - `country` varchar(50) NOT NULL, - `postalCode` varchar(15) NOT NULL, - `territory` varchar(10) NOT NULL, - PRIMARY KEY (`officeCode`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `offices` */ - -insert into `offices`(`officeCode`,`city`,`phone`,`addressLine1`,`addressLine2`,`state`,`country`,`postalCode`,`territory`) values - -('1','San Francisco','+1 650 219 4782','100 Market Street','Suite 300','CA','USA','94080','NA'), - -('2','Boston','+1 215 837 0825','1550 Court Place','Suite 102','MA','USA','02107','NA'), - -('3','NYC','+1 212 555 3000','523 East 53rd Street','apt. 
5A','NY','USA','10022','NA'), - -('4','Paris','+33 14 723 4404','43 Rue Jouffroy D\'abbans',NULL,NULL,'France','75017','EMEA'), - -('5','Tokyo','+81 33 224 5000','4-1 Kioicho',NULL,'Chiyoda-Ku','Japan','102-8578','Japan'), - -('6','Sydney','+61 2 9264 2451','5-11 Wentworth Avenue','Floor #2',NULL,'Australia','NSW 2010','APAC'), - -('7','London','+44 20 7877 2041','25 Old Broad Street','Level 7',NULL,'UK','EC2N 1HN','EMEA'); - -/*Table structure for table `orderdetails` */ - -DROP TABLE IF EXISTS `orderdetails`; - -CREATE TABLE `orderdetails` ( - `orderNumber` int(11) NOT NULL, - `productCode` varchar(15) NOT NULL, - `quantityOrdered` int(11) NOT NULL, - `priceEach` decimal(10,2) NOT NULL, - `orderLineNumber` smallint(6) NOT NULL, - PRIMARY KEY (`orderNumber`,`productCode`), - KEY `productCode` (`productCode`), - CONSTRAINT `orderdetails_ibfk_1` FOREIGN KEY (`orderNumber`) REFERENCES `orders` (`orderNumber`), - CONSTRAINT `orderdetails_ibfk_2` FOREIGN KEY (`productCode`) REFERENCES `products` (`productCode`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `orderdetails` */ - -insert into `orderdetails`(`orderNumber`,`productCode`,`quantityOrdered`,`priceEach`,`orderLineNumber`) values - -(10100,'S18_1749',30,'136.00',3), - -(10100,'S18_2248',50,'55.09',2), - -(10100,'S18_4409',22,'75.46',4), - -(10100,'S24_3969',49,'35.29',1), - -(10101,'S18_2325',25,'108.06',4), - -(10101,'S18_2795',26,'167.06',1), - -(10101,'S24_1937',45,'32.53',3), - -(10101,'S24_2022',46,'44.35',2), - -(10102,'S18_1342',39,'95.55',2), - -(10102,'S18_1367',41,'43.13',1), - -(10103,'S10_1949',26,'214.30',11), - -(10103,'S10_4962',42,'119.67',4), - -(10103,'S12_1666',27,'121.64',8), - -(10103,'S18_1097',35,'94.50',10), - -(10103,'S18_2432',22,'58.34',2), - -(10103,'S18_2949',27,'92.19',12), - -(10103,'S18_2957',35,'61.84',14), - -(10103,'S18_3136',25,'86.92',13), - -(10103,'S18_3320',46,'86.31',16), - -(10103,'S18_4600',36,'98.07',5), - -(10103,'S18_4668',41,'40.75',9), - 
-(10103,'S24_2300',36,'107.34',1), - -(10103,'S24_4258',25,'88.62',15), - -(10103,'S32_1268',31,'92.46',3), - -(10103,'S32_3522',45,'63.35',7), - -(10103,'S700_2824',42,'94.07',6), - -(10104,'S12_3148',34,'131.44',1), - -(10104,'S12_4473',41,'111.39',9), - -(10104,'S18_2238',24,'135.90',8), - -(10104,'S18_2319',29,'122.73',12), - -(10104,'S18_3232',23,'165.95',13), - -(10104,'S18_4027',38,'119.20',3), - -(10104,'S24_1444',35,'52.02',6), - -(10104,'S24_2840',44,'30.41',10), - -(10104,'S24_4048',26,'106.45',5), - -(10104,'S32_2509',35,'51.95',11), - -(10104,'S32_3207',49,'56.55',4), - -(10104,'S50_1392',33,'114.59',7), - -(10104,'S50_1514',32,'53.31',2), - -(10105,'S10_4757',50,'127.84',2), - -(10105,'S12_1108',41,'205.72',15), - -(10105,'S12_3891',29,'141.88',14), - -(10105,'S18_3140',22,'136.59',11), - -(10105,'S18_3259',38,'87.73',13), - -(10105,'S18_4522',41,'75.48',10), - -(10105,'S24_2011',43,'117.97',9), - -(10105,'S24_3151',44,'73.46',4), - -(10105,'S24_3816',50,'75.47',1), - -(10105,'S700_1138',41,'54.00',5), - -(10105,'S700_1938',29,'86.61',12), - -(10105,'S700_2610',31,'60.72',3), - -(10105,'S700_3505',39,'92.16',6), - -(10105,'S700_3962',22,'99.31',7), - -(10105,'S72_3212',25,'44.77',8), - -(10106,'S18_1662',36,'134.04',12), - -(10106,'S18_2581',34,'81.10',2), - -(10106,'S18_3029',41,'80.86',18), - -(10106,'S18_3856',41,'94.22',17), - -(10106,'S24_1785',28,'107.23',4), - -(10106,'S24_2841',49,'65.77',13), - -(10106,'S24_3420',31,'55.89',14), - -(10106,'S24_3949',50,'55.96',11), - -(10106,'S24_4278',26,'71.00',3), - -(10106,'S32_4289',33,'65.35',5), - -(10106,'S50_1341',39,'35.78',6), - -(10106,'S700_1691',31,'91.34',7), - -(10106,'S700_2047',30,'85.09',16), - -(10106,'S700_2466',34,'99.72',9), - -(10106,'S700_2834',32,'113.90',1), - -(10106,'S700_3167',44,'76.00',8), - -(10106,'S700_4002',48,'70.33',10), - -(10106,'S72_1253',48,'43.70',15), - -(10107,'S10_1678',30,'81.35',2), - -(10107,'S10_2016',39,'105.86',5), - -(10107,'S10_4698',27,'172.36',4), - 
-(10107,'S12_2823',21,'122.00',1), - -(10107,'S18_2625',29,'52.70',6), - -(10107,'S24_1578',25,'96.92',3), - -(10107,'S24_2000',38,'73.12',7), - -(10107,'S32_1374',20,'88.90',8), - -(10108,'S12_1099',33,'165.38',6), - -(10108,'S12_3380',45,'96.30',4), - -(10108,'S12_3990',39,'75.81',7), - -(10108,'S12_4675',36,'107.10',3), - -(10108,'S18_1889',38,'67.76',2), - -(10108,'S18_3278',26,'73.17',9), - -(10108,'S18_3482',29,'132.29',8), - -(10108,'S18_3782',43,'52.84',12), - -(10108,'S18_4721',44,'139.87',11), - -(10108,'S24_2360',35,'64.41',15), - -(10108,'S24_3371',30,'60.01',5), - -(10108,'S24_3856',40,'132.00',1), - -(10108,'S24_4620',31,'67.10',10), - -(10108,'S32_2206',27,'36.21',13), - -(10108,'S32_4485',31,'87.76',16), - -(10108,'S50_4713',34,'74.85',14), - -(10109,'S18_1129',26,'117.48',4), - -(10109,'S18_1984',38,'137.98',3), - -(10109,'S18_2870',26,'126.72',1), - -(10109,'S18_3232',46,'160.87',5), - -(10109,'S18_3685',47,'125.74',2), - -(10109,'S24_2972',29,'32.10',6), - -(10110,'S18_1589',37,'118.22',16), - -(10110,'S18_1749',42,'153.00',7), - -(10110,'S18_2248',32,'51.46',6), - -(10110,'S18_2325',33,'115.69',4), - -(10110,'S18_2795',31,'163.69',1), - -(10110,'S18_4409',28,'81.91',8), - -(10110,'S18_4933',42,'62.00',9), - -(10110,'S24_1046',36,'72.02',13), - -(10110,'S24_1628',29,'43.27',15), - -(10110,'S24_1937',20,'28.88',3), - -(10110,'S24_2022',39,'40.77',2), - -(10110,'S24_2766',43,'82.69',11), - -(10110,'S24_2887',46,'112.74',10), - -(10110,'S24_3191',27,'80.47',12), - -(10110,'S24_3432',37,'96.37',14), - -(10110,'S24_3969',48,'35.29',5), - -(10111,'S18_1342',33,'87.33',6), - -(10111,'S18_1367',48,'48.52',5), - -(10111,'S18_2957',28,'53.09',2), - -(10111,'S18_3136',43,'94.25',1), - -(10111,'S18_3320',39,'91.27',4), - -(10111,'S24_4258',26,'85.70',3), - -(10112,'S10_1949',29,'197.16',1), - -(10112,'S18_2949',23,'85.10',2), - -(10113,'S12_1666',21,'121.64',2), - -(10113,'S18_1097',49,'101.50',4), - -(10113,'S18_4668',50,'43.27',3), - 
-(10113,'S32_3522',23,'58.82',1), - -(10114,'S10_4962',31,'128.53',8), - -(10114,'S18_2319',39,'106.78',3), - -(10114,'S18_2432',45,'53.48',6), - -(10114,'S18_3232',48,'169.34',4), - -(10114,'S18_4600',41,'105.34',9), - -(10114,'S24_2300',21,'102.23',5), - -(10114,'S24_2840',24,'28.64',1), - -(10114,'S32_1268',32,'88.61',7), - -(10114,'S32_2509',28,'43.83',2), - -(10114,'S700_2824',42,'82.94',10), - -(10115,'S12_4473',46,'111.39',5), - -(10115,'S18_2238',46,'140.81',4), - -(10115,'S24_1444',47,'56.64',2), - -(10115,'S24_4048',44,'106.45',1), - -(10115,'S50_1392',27,'100.70',3), - -(10116,'S32_3207',27,'60.28',1), - -(10117,'S12_1108',33,'195.33',9), - -(10117,'S12_3148',43,'148.06',10), - -(10117,'S12_3891',39,'173.02',8), - -(10117,'S18_3140',26,'121.57',5), - -(10117,'S18_3259',21,'81.68',7), - -(10117,'S18_4027',22,'122.08',12), - -(10117,'S18_4522',23,'73.73',4), - -(10117,'S24_2011',41,'119.20',3), - -(10117,'S50_1514',21,'55.65',11), - -(10117,'S700_1938',38,'75.35',6), - -(10117,'S700_3962',45,'89.38',1), - -(10117,'S72_3212',50,'52.42',2), - -(10118,'S700_3505',36,'86.15',1), - -(10119,'S10_4757',46,'112.88',11), - -(10119,'S18_1662',43,'151.38',3), - -(10119,'S18_3029',21,'74.84',9), - -(10119,'S18_3856',27,'95.28',8), - -(10119,'S24_2841',41,'64.40',4), - -(10119,'S24_3151',35,'72.58',13), - -(10119,'S24_3420',20,'63.12',5), - -(10119,'S24_3816',35,'82.18',10), - -(10119,'S24_3949',28,'62.10',2), - -(10119,'S700_1138',25,'57.34',14), - -(10119,'S700_2047',29,'74.23',7), - -(10119,'S700_2610',38,'67.22',12), - -(10119,'S700_4002',26,'63.67',1), - -(10119,'S72_1253',28,'40.22',6), - -(10120,'S10_2016',29,'118.94',3), - -(10120,'S10_4698',46,'158.80',2), - -(10120,'S18_2581',29,'82.79',8), - -(10120,'S18_2625',46,'57.54',4), - -(10120,'S24_1578',35,'110.45',1), - -(10120,'S24_1785',39,'93.01',10), - -(10120,'S24_2000',34,'72.36',5), - -(10120,'S24_4278',29,'71.73',9), - -(10120,'S32_1374',22,'94.90',6), - -(10120,'S32_4289',29,'68.79',11), - 
-(10120,'S50_1341',49,'41.46',12), - -(10120,'S700_1691',47,'91.34',13), - -(10120,'S700_2466',24,'81.77',15), - -(10120,'S700_2834',24,'106.79',7), - -(10120,'S700_3167',43,'72.00',14), - -(10121,'S10_1678',34,'86.13',5), - -(10121,'S12_2823',50,'126.52',4), - -(10121,'S24_2360',32,'58.18',2), - -(10121,'S32_4485',25,'95.93',3), - -(10121,'S50_4713',44,'72.41',1), - -(10122,'S12_1099',42,'155.66',10), - -(10122,'S12_3380',37,'113.92',8), - -(10122,'S12_3990',32,'65.44',11), - -(10122,'S12_4675',20,'104.80',7), - -(10122,'S18_1129',34,'114.65',2), - -(10122,'S18_1889',43,'62.37',6), - -(10122,'S18_1984',31,'113.80',1), - -(10122,'S18_3232',25,'137.17',3), - -(10122,'S18_3278',21,'69.15',13), - -(10122,'S18_3482',21,'133.76',12), - -(10122,'S18_3782',35,'59.06',16), - -(10122,'S18_4721',28,'145.82',15), - -(10122,'S24_2972',39,'34.74',4), - -(10122,'S24_3371',34,'50.82',9), - -(10122,'S24_3856',43,'136.22',5), - -(10122,'S24_4620',29,'67.10',14), - -(10122,'S32_2206',31,'33.79',17), - -(10123,'S18_1589',26,'120.71',2), - -(10123,'S18_2870',46,'114.84',3), - -(10123,'S18_3685',34,'117.26',4), - -(10123,'S24_1628',50,'43.27',1), - -(10124,'S18_1749',21,'153.00',6), - -(10124,'S18_2248',42,'58.12',5), - -(10124,'S18_2325',42,'111.87',3), - -(10124,'S18_4409',36,'75.46',7), - -(10124,'S18_4933',23,'66.28',8), - -(10124,'S24_1046',22,'62.47',12), - -(10124,'S24_1937',45,'30.53',2), - -(10124,'S24_2022',22,'36.29',1), - -(10124,'S24_2766',32,'74.51',10), - -(10124,'S24_2887',25,'93.95',9), - -(10124,'S24_3191',49,'76.19',11), - -(10124,'S24_3432',43,'101.73',13), - -(10124,'S24_3969',46,'36.11',4), - -(10125,'S18_1342',32,'89.38',1), - -(10125,'S18_2795',34,'138.38',2), - -(10126,'S10_1949',38,'205.73',11), - -(10126,'S10_4962',22,'122.62',4), - -(10126,'S12_1666',21,'135.30',8), - -(10126,'S18_1097',38,'116.67',10), - -(10126,'S18_1367',42,'51.21',17), - -(10126,'S18_2432',43,'51.05',2), - -(10126,'S18_2949',31,'93.21',12), - -(10126,'S18_2957',46,'61.84',14), - 
-(10126,'S18_3136',30,'93.20',13), - -(10126,'S18_3320',38,'94.25',16), - -(10126,'S18_4600',50,'102.92',5), - -(10126,'S18_4668',43,'47.29',9), - -(10126,'S24_2300',27,'122.68',1), - -(10126,'S24_4258',34,'83.76',15), - -(10126,'S32_1268',43,'82.83',3), - -(10126,'S32_3522',26,'62.05',7), - -(10126,'S700_2824',45,'97.10',6), - -(10127,'S12_1108',46,'193.25',2), - -(10127,'S12_3148',46,'140.50',3), - -(10127,'S12_3891',42,'169.56',1), - -(10127,'S12_4473',24,'100.73',11), - -(10127,'S18_2238',45,'140.81',10), - -(10127,'S18_2319',45,'114.14',14), - -(10127,'S18_3232',22,'149.02',15), - -(10127,'S18_4027',25,'126.39',5), - -(10127,'S24_1444',20,'50.86',8), - -(10127,'S24_2840',39,'34.30',12), - -(10127,'S24_4048',20,'107.63',7), - -(10127,'S32_2509',45,'46.53',13), - -(10127,'S32_3207',29,'60.90',6), - -(10127,'S50_1392',46,'111.12',9), - -(10127,'S50_1514',46,'55.65',4), - -(10128,'S18_3140',41,'120.20',2), - -(10128,'S18_3259',41,'80.67',4), - -(10128,'S18_4522',43,'77.24',1), - -(10128,'S700_1938',32,'72.75',3), - -(10129,'S10_4757',33,'123.76',2), - -(10129,'S24_2011',45,'113.06',9), - -(10129,'S24_3151',41,'81.43',4), - -(10129,'S24_3816',50,'76.31',1), - -(10129,'S700_1138',31,'58.67',5), - -(10129,'S700_2610',45,'72.28',3), - -(10129,'S700_3505',42,'90.15',6), - -(10129,'S700_3962',30,'94.34',7), - -(10129,'S72_3212',32,'44.23',8), - -(10130,'S18_3029',40,'68.82',2), - -(10130,'S18_3856',33,'99.52',1), - -(10131,'S18_1662',21,'141.92',4), - -(10131,'S24_2841',35,'60.97',5), - -(10131,'S24_3420',29,'52.60',6), - -(10131,'S24_3949',50,'54.59',3), - -(10131,'S700_2047',22,'76.94',8), - -(10131,'S700_2466',40,'86.76',1), - -(10131,'S700_4002',26,'63.67',2), - -(10131,'S72_1253',21,'40.22',7), - -(10132,'S700_3167',36,'80.00',1), - -(10133,'S18_2581',49,'80.26',3), - -(10133,'S24_1785',41,'109.42',5), - -(10133,'S24_4278',46,'61.58',4), - -(10133,'S32_1374',23,'80.91',1), - -(10133,'S32_4289',49,'67.41',6), - -(10133,'S50_1341',27,'37.09',7), - 
-(10133,'S700_1691',24,'76.73',8), - -(10133,'S700_2834',27,'115.09',2), - -(10134,'S10_1678',41,'90.92',2), - -(10134,'S10_2016',27,'116.56',5), - -(10134,'S10_4698',31,'187.85',4), - -(10134,'S12_2823',20,'131.04',1), - -(10134,'S18_2625',30,'51.48',6), - -(10134,'S24_1578',35,'94.67',3), - -(10134,'S24_2000',43,'75.41',7), - -(10135,'S12_1099',42,'173.17',7), - -(10135,'S12_3380',48,'110.39',5), - -(10135,'S12_3990',24,'72.62',8), - -(10135,'S12_4675',29,'103.64',4), - -(10135,'S18_1889',48,'66.99',3), - -(10135,'S18_3278',45,'65.94',10), - -(10135,'S18_3482',42,'139.64',9), - -(10135,'S18_3782',45,'49.74',13), - -(10135,'S18_4721',31,'133.92',12), - -(10135,'S24_2360',29,'67.18',16), - -(10135,'S24_2972',20,'34.36',1), - -(10135,'S24_3371',27,'52.05',6), - -(10135,'S24_3856',47,'139.03',2), - -(10135,'S24_4620',23,'76.80',11), - -(10135,'S32_2206',33,'38.62',14), - -(10135,'S32_4485',30,'91.85',17), - -(10135,'S50_4713',44,'78.92',15), - -(10136,'S18_1129',25,'117.48',2), - -(10136,'S18_1984',36,'120.91',1), - -(10136,'S18_3232',41,'169.34',3), - -(10137,'S18_1589',44,'115.73',2), - -(10137,'S18_2870',37,'110.88',3), - -(10137,'S18_3685',31,'118.68',4), - -(10137,'S24_1628',26,'40.25',1), - -(10138,'S18_1749',33,'149.60',6), - -(10138,'S18_2248',22,'51.46',5), - -(10138,'S18_2325',38,'114.42',3), - -(10138,'S18_4409',47,'79.15',7), - -(10138,'S18_4933',23,'64.86',8), - -(10138,'S24_1046',45,'59.53',12), - -(10138,'S24_1937',22,'33.19',2), - -(10138,'S24_2022',33,'38.53',1), - -(10138,'S24_2766',28,'73.60',10), - -(10138,'S24_2887',30,'96.30',9), - -(10138,'S24_3191',49,'77.05',11), - -(10138,'S24_3432',21,'99.58',13), - -(10138,'S24_3969',29,'32.82',4), - -(10139,'S18_1342',31,'89.38',7), - -(10139,'S18_1367',49,'52.83',6), - -(10139,'S18_2795',41,'151.88',8), - -(10139,'S18_2949',46,'91.18',1), - -(10139,'S18_2957',20,'52.47',3), - -(10139,'S18_3136',20,'101.58',2), - -(10139,'S18_3320',30,'81.35',5), - -(10139,'S24_4258',29,'93.49',4), - 
-(10140,'S10_1949',37,'186.44',11), - -(10140,'S10_4962',26,'131.49',4), - -(10140,'S12_1666',38,'118.90',8), - -(10140,'S18_1097',32,'95.67',10), - -(10140,'S18_2432',46,'51.05',2), - -(10140,'S18_4600',40,'100.50',5), - -(10140,'S18_4668',29,'40.25',9), - -(10140,'S24_2300',47,'118.84',1), - -(10140,'S32_1268',26,'87.64',3), - -(10140,'S32_3522',28,'62.05',7), - -(10140,'S700_2824',36,'101.15',6), - -(10141,'S12_4473',21,'114.95',5), - -(10141,'S18_2238',39,'160.46',4), - -(10141,'S18_2319',47,'103.09',8), - -(10141,'S18_3232',34,'143.94',9), - -(10141,'S24_1444',20,'50.86',2), - -(10141,'S24_2840',21,'32.18',6), - -(10141,'S24_4048',40,'104.09',1), - -(10141,'S32_2509',24,'53.03',7), - -(10141,'S50_1392',44,'94.92',3), - -(10142,'S12_1108',33,'166.24',12), - -(10142,'S12_3148',33,'140.50',13), - -(10142,'S12_3891',46,'167.83',11), - -(10142,'S18_3140',47,'129.76',8), - -(10142,'S18_3259',22,'95.80',10), - -(10142,'S18_4027',24,'122.08',15), - -(10142,'S18_4522',24,'79.87',7), - -(10142,'S24_2011',33,'114.29',6), - -(10142,'S24_3151',49,'74.35',1), - -(10142,'S32_3207',42,'60.90',16), - -(10142,'S50_1514',42,'56.24',14), - -(10142,'S700_1138',41,'55.34',2), - -(10142,'S700_1938',43,'77.08',9), - -(10142,'S700_3505',21,'92.16',3), - -(10142,'S700_3962',38,'91.37',4), - -(10142,'S72_3212',39,'46.96',5), - -(10143,'S10_4757',49,'133.28',15), - -(10143,'S18_1662',32,'126.15',7), - -(10143,'S18_3029',46,'70.54',13), - -(10143,'S18_3856',34,'99.52',12), - -(10143,'S24_2841',27,'63.71',8), - -(10143,'S24_3420',33,'59.83',9), - -(10143,'S24_3816',23,'74.64',14), - -(10143,'S24_3949',28,'55.96',6), - -(10143,'S50_1341',34,'34.91',1), - -(10143,'S700_1691',36,'86.77',2), - -(10143,'S700_2047',26,'87.80',11), - -(10143,'S700_2466',26,'79.78',4), - -(10143,'S700_2610',31,'69.39',16), - -(10143,'S700_3167',28,'70.40',3), - -(10143,'S700_4002',34,'65.15',5), - -(10143,'S72_1253',37,'49.66',10), - -(10144,'S32_4289',20,'56.41',1), - -(10145,'S10_1678',45,'76.56',6), - 
-(10145,'S10_2016',37,'104.67',9), - -(10145,'S10_4698',33,'154.93',8), - -(10145,'S12_2823',49,'146.10',5), - -(10145,'S18_2581',30,'71.81',14), - -(10145,'S18_2625',30,'52.70',10), - -(10145,'S24_1578',43,'103.68',7), - -(10145,'S24_1785',40,'87.54',16), - -(10145,'S24_2000',47,'63.98',11), - -(10145,'S24_2360',27,'56.10',3), - -(10145,'S24_4278',33,'71.73',15), - -(10145,'S32_1374',33,'99.89',12), - -(10145,'S32_2206',31,'39.43',1), - -(10145,'S32_4485',27,'95.93',4), - -(10145,'S50_4713',38,'73.22',2), - -(10145,'S700_2834',20,'113.90',13), - -(10146,'S18_3782',47,'60.30',2), - -(10146,'S18_4721',29,'130.94',1), - -(10147,'S12_1099',48,'161.49',7), - -(10147,'S12_3380',31,'110.39',5), - -(10147,'S12_3990',21,'74.21',8), - -(10147,'S12_4675',33,'97.89',4), - -(10147,'S18_1889',26,'70.84',3), - -(10147,'S18_3278',36,'74.78',10), - -(10147,'S18_3482',37,'129.35',9), - -(10147,'S24_2972',25,'33.23',1), - -(10147,'S24_3371',30,'48.98',6), - -(10147,'S24_3856',23,'123.58',2), - -(10147,'S24_4620',31,'72.76',11), - -(10148,'S18_1129',23,'114.65',13), - -(10148,'S18_1589',47,'108.26',9), - -(10148,'S18_1984',25,'136.56',12), - -(10148,'S18_2870',27,'113.52',10), - -(10148,'S18_3232',32,'143.94',14), - -(10148,'S18_3685',28,'135.63',11), - -(10148,'S18_4409',34,'83.75',1), - -(10148,'S18_4933',29,'66.28',2), - -(10148,'S24_1046',25,'65.41',6), - -(10148,'S24_1628',47,'46.29',8), - -(10148,'S24_2766',21,'77.24',4), - -(10148,'S24_2887',34,'115.09',3), - -(10148,'S24_3191',31,'71.91',5), - -(10148,'S24_3432',27,'96.37',7), - -(10149,'S18_1342',50,'87.33',4), - -(10149,'S18_1367',30,'48.52',3), - -(10149,'S18_1749',34,'156.40',11), - -(10149,'S18_2248',24,'50.85',10), - -(10149,'S18_2325',33,'125.86',8), - -(10149,'S18_2795',23,'167.06',5), - -(10149,'S18_3320',42,'89.29',2), - -(10149,'S24_1937',36,'31.20',7), - -(10149,'S24_2022',49,'39.87',6), - -(10149,'S24_3969',26,'38.57',9), - -(10149,'S24_4258',20,'90.57',1), - -(10150,'S10_1949',45,'182.16',8), - 
-(10150,'S10_4962',20,'121.15',1), - -(10150,'S12_1666',30,'135.30',5), - -(10150,'S18_1097',34,'95.67',7), - -(10150,'S18_2949',47,'93.21',9), - -(10150,'S18_2957',30,'56.21',11), - -(10150,'S18_3136',26,'97.39',10), - -(10150,'S18_4600',49,'111.39',2), - -(10150,'S18_4668',30,'47.29',6), - -(10150,'S32_3522',49,'62.05',4), - -(10150,'S700_2824',20,'95.08',3), - -(10151,'S12_4473',24,'114.95',3), - -(10151,'S18_2238',43,'152.27',2), - -(10151,'S18_2319',49,'106.78',6), - -(10151,'S18_2432',39,'58.34',9), - -(10151,'S18_3232',21,'167.65',7), - -(10151,'S24_2300',42,'109.90',8), - -(10151,'S24_2840',30,'29.35',4), - -(10151,'S32_1268',27,'84.75',10), - -(10151,'S32_2509',41,'43.29',5), - -(10151,'S50_1392',26,'108.81',1), - -(10152,'S18_4027',35,'117.77',1), - -(10152,'S24_1444',25,'49.13',4), - -(10152,'S24_4048',23,'112.37',3), - -(10152,'S32_3207',33,'57.17',2), - -(10153,'S12_1108',20,'201.57',11), - -(10153,'S12_3148',42,'128.42',12), - -(10153,'S12_3891',49,'155.72',10), - -(10153,'S18_3140',31,'125.66',7), - -(10153,'S18_3259',29,'82.69',9), - -(10153,'S18_4522',22,'82.50',6), - -(10153,'S24_2011',40,'111.83',5), - -(10153,'S50_1514',31,'53.31',13), - -(10153,'S700_1138',43,'58.00',1), - -(10153,'S700_1938',31,'80.55',8), - -(10153,'S700_3505',50,'87.15',2), - -(10153,'S700_3962',20,'85.41',3), - -(10153,'S72_3212',50,'51.87',4), - -(10154,'S24_3151',31,'75.23',2), - -(10154,'S700_2610',36,'59.27',1), - -(10155,'S10_4757',32,'129.20',13), - -(10155,'S18_1662',38,'138.77',5), - -(10155,'S18_3029',44,'83.44',11), - -(10155,'S18_3856',29,'105.87',10), - -(10155,'S24_2841',23,'62.34',6), - -(10155,'S24_3420',34,'56.55',7), - -(10155,'S24_3816',37,'76.31',12), - -(10155,'S24_3949',44,'58.69',4), - -(10155,'S700_2047',32,'89.61',9), - -(10155,'S700_2466',20,'87.75',2), - -(10155,'S700_3167',43,'76.80',1), - -(10155,'S700_4002',44,'70.33',3), - -(10155,'S72_1253',34,'49.16',8), - -(10156,'S50_1341',20,'43.64',1), - -(10156,'S700_1691',48,'77.64',2), - 
-(10157,'S18_2581',33,'69.27',3), - -(10157,'S24_1785',40,'89.72',5), - -(10157,'S24_4278',33,'66.65',4), - -(10157,'S32_1374',34,'83.91',1), - -(10157,'S32_4289',28,'56.41',6), - -(10157,'S700_2834',48,'109.16',2), - -(10158,'S24_2000',22,'67.79',1), - -(10159,'S10_1678',49,'81.35',14), - -(10159,'S10_2016',37,'101.10',17), - -(10159,'S10_4698',22,'170.42',16), - -(10159,'S12_1099',41,'188.73',2), - -(10159,'S12_2823',38,'131.04',13), - -(10159,'S12_3990',24,'67.03',3), - -(10159,'S18_2625',42,'51.48',18), - -(10159,'S18_3278',21,'66.74',5), - -(10159,'S18_3482',25,'129.35',4), - -(10159,'S18_3782',21,'54.71',8), - -(10159,'S18_4721',32,'142.85',7), - -(10159,'S24_1578',44,'100.30',15), - -(10159,'S24_2360',27,'67.18',11), - -(10159,'S24_3371',50,'49.60',1), - -(10159,'S24_4620',23,'80.84',6), - -(10159,'S32_2206',35,'39.43',9), - -(10159,'S32_4485',23,'86.74',12), - -(10159,'S50_4713',31,'78.11',10), - -(10160,'S12_3380',46,'96.30',6), - -(10160,'S12_4675',50,'93.28',5), - -(10160,'S18_1889',38,'70.84',4), - -(10160,'S18_3232',20,'140.55',1), - -(10160,'S24_2972',42,'30.59',2), - -(10160,'S24_3856',35,'130.60',3), - -(10161,'S18_1129',28,'121.72',12), - -(10161,'S18_1589',43,'102.04',8), - -(10161,'S18_1984',48,'139.41',11), - -(10161,'S18_2870',23,'125.40',9), - -(10161,'S18_3685',36,'132.80',10), - -(10161,'S18_4933',25,'62.72',1), - -(10161,'S24_1046',37,'73.49',5), - -(10161,'S24_1628',23,'47.29',7), - -(10161,'S24_2766',20,'82.69',3), - -(10161,'S24_2887',25,'108.04',2), - -(10161,'S24_3191',20,'72.77',4), - -(10161,'S24_3432',30,'94.23',6), - -(10162,'S18_1342',48,'87.33',2), - -(10162,'S18_1367',45,'45.28',1), - -(10162,'S18_1749',29,'141.10',9), - -(10162,'S18_2248',27,'53.28',8), - -(10162,'S18_2325',38,'113.15',6), - -(10162,'S18_2795',48,'156.94',3), - -(10162,'S18_4409',39,'86.51',10), - -(10162,'S24_1937',37,'27.55',5), - -(10162,'S24_2022',43,'38.98',4), - -(10162,'S24_3969',37,'32.82',7), - -(10163,'S10_1949',21,'212.16',1), - 
-(10163,'S18_2949',31,'101.31',2), - -(10163,'S18_2957',48,'59.96',4), - -(10163,'S18_3136',40,'101.58',3), - -(10163,'S18_3320',43,'80.36',6), - -(10163,'S24_4258',42,'96.42',5), - -(10164,'S10_4962',21,'143.31',2), - -(10164,'S12_1666',49,'121.64',6), - -(10164,'S18_1097',36,'103.84',8), - -(10164,'S18_4600',45,'107.76',3), - -(10164,'S18_4668',25,'46.29',7), - -(10164,'S32_1268',24,'91.49',1), - -(10164,'S32_3522',49,'57.53',5), - -(10164,'S700_2824',39,'86.99',4), - -(10165,'S12_1108',44,'168.32',3), - -(10165,'S12_3148',34,'123.89',4), - -(10165,'S12_3891',27,'152.26',2), - -(10165,'S12_4473',48,'109.02',12), - -(10165,'S18_2238',29,'134.26',11), - -(10165,'S18_2319',46,'120.28',15), - -(10165,'S18_2432',31,'60.77',18), - -(10165,'S18_3232',47,'154.10',16), - -(10165,'S18_3259',50,'84.71',1), - -(10165,'S18_4027',28,'123.51',6), - -(10165,'S24_1444',25,'46.82',9), - -(10165,'S24_2300',32,'117.57',17), - -(10165,'S24_2840',27,'31.12',13), - -(10165,'S24_4048',24,'106.45',8), - -(10165,'S32_2509',48,'50.86',14), - -(10165,'S32_3207',44,'55.30',7), - -(10165,'S50_1392',48,'106.49',10), - -(10165,'S50_1514',38,'49.21',5), - -(10166,'S18_3140',43,'136.59',2), - -(10166,'S18_4522',26,'72.85',1), - -(10166,'S700_1938',29,'76.22',3), - -(10167,'S10_4757',44,'123.76',9), - -(10167,'S18_1662',43,'141.92',1), - -(10167,'S18_3029',46,'69.68',7), - -(10167,'S18_3856',34,'84.70',6), - -(10167,'S24_2011',33,'110.60',16), - -(10167,'S24_2841',21,'54.81',2), - -(10167,'S24_3151',20,'77.00',11), - -(10167,'S24_3420',32,'64.44',3), - -(10167,'S24_3816',29,'73.80',8), - -(10167,'S700_1138',43,'66.00',12), - -(10167,'S700_2047',29,'87.80',5), - -(10167,'S700_2610',46,'62.16',10), - -(10167,'S700_3505',24,'85.14',13), - -(10167,'S700_3962',28,'83.42',14), - -(10167,'S72_1253',40,'42.71',4), - -(10167,'S72_3212',38,'43.68',15), - -(10168,'S10_1678',36,'94.74',1), - -(10168,'S10_2016',27,'97.53',4), - -(10168,'S10_4698',20,'160.74',3), - -(10168,'S18_2581',21,'75.19',9), - 
-(10168,'S18_2625',46,'49.06',5), - -(10168,'S24_1578',50,'103.68',2), - -(10168,'S24_1785',49,'93.01',11), - -(10168,'S24_2000',29,'72.36',6), - -(10168,'S24_3949',27,'57.32',18), - -(10168,'S24_4278',48,'68.10',10), - -(10168,'S32_1374',28,'89.90',7), - -(10168,'S32_4289',31,'57.78',12), - -(10168,'S50_1341',48,'39.71',13), - -(10168,'S700_1691',28,'91.34',14), - -(10168,'S700_2466',31,'87.75',16), - -(10168,'S700_2834',36,'94.92',8), - -(10168,'S700_3167',48,'72.00',15), - -(10168,'S700_4002',39,'67.37',17), - -(10169,'S12_1099',30,'163.44',2), - -(10169,'S12_2823',35,'126.52',13), - -(10169,'S12_3990',36,'71.82',3), - -(10169,'S18_3278',32,'65.13',5), - -(10169,'S18_3482',36,'136.70',4), - -(10169,'S18_3782',38,'52.84',8), - -(10169,'S18_4721',33,'120.53',7), - -(10169,'S24_2360',38,'66.49',11), - -(10169,'S24_3371',34,'53.27',1), - -(10169,'S24_4620',24,'77.61',6), - -(10169,'S32_2206',26,'37.01',9), - -(10169,'S32_4485',34,'83.68',12), - -(10169,'S50_4713',48,'75.66',10), - -(10170,'S12_3380',47,'116.27',4), - -(10170,'S12_4675',41,'93.28',3), - -(10170,'S18_1889',20,'70.07',2), - -(10170,'S24_3856',34,'130.60',1), - -(10171,'S18_1129',35,'134.46',2), - -(10171,'S18_1984',35,'128.03',1), - -(10171,'S18_3232',39,'165.95',3), - -(10171,'S24_2972',36,'34.74',4), - -(10172,'S18_1589',42,'109.51',6), - -(10172,'S18_2870',39,'117.48',7), - -(10172,'S18_3685',48,'139.87',8), - -(10172,'S24_1046',32,'61.00',3), - -(10172,'S24_1628',34,'43.27',5), - -(10172,'S24_2766',22,'79.97',1), - -(10172,'S24_3191',24,'77.91',2), - -(10172,'S24_3432',22,'87.81',4), - -(10173,'S18_1342',43,'101.71',6), - -(10173,'S18_1367',48,'51.75',5), - -(10173,'S18_1749',24,'168.30',13), - -(10173,'S18_2248',26,'55.09',12), - -(10173,'S18_2325',31,'127.13',10), - -(10173,'S18_2795',22,'140.06',7), - -(10173,'S18_2957',28,'56.84',2), - -(10173,'S18_3136',31,'86.92',1), - -(10173,'S18_3320',29,'90.28',4), - -(10173,'S18_4409',21,'77.31',14), - -(10173,'S18_4933',39,'58.44',15), - 
-(10173,'S24_1937',31,'29.87',9), - -(10173,'S24_2022',27,'39.42',8), - -(10173,'S24_2887',23,'98.65',16), - -(10173,'S24_3969',35,'35.70',11), - -(10173,'S24_4258',22,'93.49',3), - -(10174,'S10_1949',34,'207.87',4), - -(10174,'S12_1666',43,'113.44',1), - -(10174,'S18_1097',48,'108.50',3), - -(10174,'S18_2949',46,'100.30',5), - -(10174,'S18_4668',49,'44.27',2), - -(10175,'S10_4962',33,'119.67',9), - -(10175,'S12_4473',26,'109.02',1), - -(10175,'S18_2319',48,'101.87',4), - -(10175,'S18_2432',41,'59.55',7), - -(10175,'S18_3232',29,'150.71',5), - -(10175,'S18_4600',47,'102.92',10), - -(10175,'S24_2300',28,'121.40',6), - -(10175,'S24_2840',37,'32.18',2), - -(10175,'S32_1268',22,'89.57',8), - -(10175,'S32_2509',50,'50.86',3), - -(10175,'S32_3522',29,'56.24',12), - -(10175,'S700_2824',42,'80.92',11), - -(10176,'S12_1108',33,'166.24',2), - -(10176,'S12_3148',47,'145.04',3), - -(10176,'S12_3891',50,'160.91',1), - -(10176,'S18_2238',20,'139.17',10), - -(10176,'S18_4027',36,'140.75',5), - -(10176,'S24_1444',27,'55.49',8), - -(10176,'S24_4048',29,'101.72',7), - -(10176,'S32_3207',22,'62.14',6), - -(10176,'S50_1392',23,'109.96',9), - -(10176,'S50_1514',38,'52.14',4), - -(10177,'S18_3140',23,'113.37',9), - -(10177,'S18_3259',29,'92.77',11), - -(10177,'S18_4522',35,'82.50',8), - -(10177,'S24_2011',50,'115.52',7), - -(10177,'S24_3151',45,'79.66',2), - -(10177,'S700_1138',24,'58.67',3), - -(10177,'S700_1938',31,'77.95',10), - -(10177,'S700_2610',32,'64.33',1), - -(10177,'S700_3505',44,'88.15',4), - -(10177,'S700_3962',24,'83.42',5), - -(10177,'S72_3212',40,'52.96',6), - -(10178,'S10_4757',24,'131.92',12), - -(10178,'S18_1662',42,'127.73',4), - -(10178,'S18_3029',41,'70.54',10), - -(10178,'S18_3856',48,'104.81',9), - -(10178,'S24_2841',34,'67.82',5), - -(10178,'S24_3420',27,'65.75',6), - -(10178,'S24_3816',21,'68.77',11), - -(10178,'S24_3949',30,'64.15',3), - -(10178,'S700_2047',34,'86.90',8), - -(10178,'S700_2466',22,'91.74',1), - -(10178,'S700_4002',45,'68.11',2), - 
-(10178,'S72_1253',45,'41.71',7), - -(10179,'S18_2581',24,'82.79',3), - -(10179,'S24_1785',47,'105.04',5), - -(10179,'S24_4278',27,'66.65',4), - -(10179,'S32_1374',45,'86.90',1), - -(10179,'S32_4289',24,'63.97',6), - -(10179,'S50_1341',34,'43.20',7), - -(10179,'S700_1691',23,'75.81',8), - -(10179,'S700_2834',25,'98.48',2), - -(10179,'S700_3167',39,'80.00',9), - -(10180,'S10_1678',29,'76.56',9), - -(10180,'S10_2016',42,'99.91',12), - -(10180,'S10_4698',41,'164.61',11), - -(10180,'S12_2823',40,'131.04',8), - -(10180,'S18_2625',25,'48.46',13), - -(10180,'S18_3782',21,'59.06',3), - -(10180,'S18_4721',44,'147.31',2), - -(10180,'S24_1578',48,'98.05',10), - -(10180,'S24_2000',28,'61.70',14), - -(10180,'S24_2360',35,'60.95',6), - -(10180,'S24_4620',28,'68.71',1), - -(10180,'S32_2206',34,'33.39',4), - -(10180,'S32_4485',22,'102.05',7), - -(10180,'S50_4713',21,'74.85',5), - -(10181,'S12_1099',27,'155.66',14), - -(10181,'S12_3380',28,'113.92',12), - -(10181,'S12_3990',20,'67.03',15), - -(10181,'S12_4675',36,'107.10',11), - -(10181,'S18_1129',44,'124.56',6), - -(10181,'S18_1589',42,'124.44',2), - -(10181,'S18_1889',22,'74.69',10), - -(10181,'S18_1984',21,'129.45',5), - -(10181,'S18_2870',27,'130.68',3), - -(10181,'S18_3232',45,'147.33',7), - -(10181,'S18_3278',30,'73.17',17), - -(10181,'S18_3482',22,'120.53',16), - -(10181,'S18_3685',39,'137.04',4), - -(10181,'S24_1628',34,'45.28',1), - -(10181,'S24_2972',37,'32.85',8), - -(10181,'S24_3371',23,'54.49',13), - -(10181,'S24_3856',25,'122.17',9), - -(10182,'S18_1342',25,'83.22',3), - -(10182,'S18_1367',32,'44.21',2), - -(10182,'S18_1749',44,'159.80',10), - -(10182,'S18_2248',38,'54.49',9), - -(10182,'S18_2325',20,'105.52',7), - -(10182,'S18_2795',21,'135.00',4), - -(10182,'S18_3320',33,'86.31',1), - -(10182,'S18_4409',36,'88.35',11), - -(10182,'S18_4933',44,'61.29',12), - -(10182,'S24_1046',47,'63.20',16), - -(10182,'S24_1937',39,'31.86',6), - -(10182,'S24_2022',31,'39.87',5), - -(10182,'S24_2766',36,'87.24',14), - 
-(10182,'S24_2887',20,'116.27',13), - -(10182,'S24_3191',33,'73.62',15), - -(10182,'S24_3432',49,'95.30',17), - -(10182,'S24_3969',23,'34.88',8), - -(10183,'S10_1949',23,'180.01',8), - -(10183,'S10_4962',28,'127.06',1), - -(10183,'S12_1666',41,'114.80',5), - -(10183,'S18_1097',21,'108.50',7), - -(10183,'S18_2949',37,'91.18',9), - -(10183,'S18_2957',39,'51.22',11), - -(10183,'S18_3136',22,'90.06',10), - -(10183,'S18_4600',21,'118.66',2), - -(10183,'S18_4668',40,'42.26',6), - -(10183,'S24_4258',47,'81.81',12), - -(10183,'S32_3522',49,'52.36',4), - -(10183,'S700_2824',23,'85.98',3), - -(10184,'S12_4473',37,'105.47',6), - -(10184,'S18_2238',46,'145.72',5), - -(10184,'S18_2319',46,'119.05',9), - -(10184,'S18_2432',44,'60.77',12), - -(10184,'S18_3232',28,'165.95',10), - -(10184,'S24_1444',31,'57.22',3), - -(10184,'S24_2300',24,'117.57',11), - -(10184,'S24_2840',42,'30.06',7), - -(10184,'S24_4048',49,'114.73',2), - -(10184,'S32_1268',46,'84.75',13), - -(10184,'S32_2509',33,'52.49',8), - -(10184,'S32_3207',48,'59.03',1), - -(10184,'S50_1392',45,'92.60',4), - -(10185,'S12_1108',21,'195.33',13), - -(10185,'S12_3148',33,'146.55',14), - -(10185,'S12_3891',43,'147.07',12), - -(10185,'S18_3140',28,'124.30',9), - -(10185,'S18_3259',49,'94.79',11), - -(10185,'S18_4027',39,'127.82',16), - -(10185,'S18_4522',47,'87.77',8), - -(10185,'S24_2011',30,'105.69',7), - -(10185,'S24_3151',33,'83.20',2), - -(10185,'S50_1514',20,'46.86',15), - -(10185,'S700_1138',21,'64.67',3), - -(10185,'S700_1938',30,'79.68',10), - -(10185,'S700_2610',39,'61.44',1), - -(10185,'S700_3505',37,'99.17',4), - -(10185,'S700_3962',22,'93.35',5), - -(10185,'S72_3212',28,'47.50',6), - -(10186,'S10_4757',26,'108.80',9), - -(10186,'S18_1662',32,'137.19',1), - -(10186,'S18_3029',32,'73.12',7), - -(10186,'S18_3856',46,'98.46',6), - -(10186,'S24_2841',22,'60.29',2), - -(10186,'S24_3420',21,'59.83',3), - -(10186,'S24_3816',36,'68.77',8), - -(10186,'S700_2047',24,'80.56',5), - -(10186,'S72_1253',28,'42.71',4), - 
-(10187,'S18_2581',45,'70.12',1), - -(10187,'S24_1785',46,'96.29',3), - -(10187,'S24_3949',43,'55.96',10), - -(10187,'S24_4278',33,'64.48',2), - -(10187,'S32_4289',31,'61.22',4), - -(10187,'S50_1341',41,'39.71',5), - -(10187,'S700_1691',34,'84.95',6), - -(10187,'S700_2466',44,'95.73',8), - -(10187,'S700_3167',34,'72.00',7), - -(10187,'S700_4002',44,'70.33',9), - -(10188,'S10_1678',48,'95.70',1), - -(10188,'S10_2016',38,'111.80',4), - -(10188,'S10_4698',45,'182.04',3), - -(10188,'S18_2625',32,'52.09',5), - -(10188,'S24_1578',25,'95.80',2), - -(10188,'S24_2000',40,'61.70',6), - -(10188,'S32_1374',44,'81.91',7), - -(10188,'S700_2834',29,'96.11',8), - -(10189,'S12_2823',28,'138.57',1), - -(10190,'S24_2360',42,'58.87',3), - -(10190,'S32_2206',46,'38.62',1), - -(10190,'S32_4485',42,'89.80',4), - -(10190,'S50_4713',40,'67.53',2), - -(10191,'S12_1099',21,'155.66',3), - -(10191,'S12_3380',40,'104.52',1), - -(10191,'S12_3990',30,'70.22',4), - -(10191,'S18_3278',36,'75.59',6), - -(10191,'S18_3482',23,'119.06',5), - -(10191,'S18_3782',43,'60.93',9), - -(10191,'S18_4721',32,'136.90',8), - -(10191,'S24_3371',48,'53.27',2), - -(10191,'S24_4620',44,'77.61',7), - -(10192,'S12_4675',27,'99.04',16), - -(10192,'S18_1129',22,'140.12',11), - -(10192,'S18_1589',29,'100.80',7), - -(10192,'S18_1889',45,'70.84',15), - -(10192,'S18_1984',47,'128.03',10), - -(10192,'S18_2870',38,'110.88',8), - -(10192,'S18_3232',26,'137.17',12), - -(10192,'S18_3685',45,'125.74',9), - -(10192,'S24_1046',37,'72.02',4), - -(10192,'S24_1628',47,'49.30',6), - -(10192,'S24_2766',46,'86.33',2), - -(10192,'S24_2887',23,'112.74',1), - -(10192,'S24_2972',30,'33.23',13), - -(10192,'S24_3191',32,'69.34',3), - -(10192,'S24_3432',46,'93.16',5), - -(10192,'S24_3856',45,'112.34',14), - -(10193,'S18_1342',28,'92.47',7), - -(10193,'S18_1367',46,'46.36',6), - -(10193,'S18_1749',21,'153.00',14), - -(10193,'S18_2248',42,'60.54',13), - -(10193,'S18_2325',44,'115.69',11), - -(10193,'S18_2795',22,'143.44',8), - 
-(10193,'S18_2949',28,'87.13',1), - -(10193,'S18_2957',24,'53.09',3), - -(10193,'S18_3136',23,'97.39',2), - -(10193,'S18_3320',32,'79.37',5), - -(10193,'S18_4409',24,'92.03',15), - -(10193,'S18_4933',25,'66.28',16), - -(10193,'S24_1937',26,'32.19',10), - -(10193,'S24_2022',20,'44.80',9), - -(10193,'S24_3969',22,'38.16',12), - -(10193,'S24_4258',20,'92.52',4), - -(10194,'S10_1949',42,'203.59',11), - -(10194,'S10_4962',26,'134.44',4), - -(10194,'S12_1666',38,'124.37',8), - -(10194,'S18_1097',21,'103.84',10), - -(10194,'S18_2432',45,'51.05',2), - -(10194,'S18_4600',32,'113.82',5), - -(10194,'S18_4668',41,'47.79',9), - -(10194,'S24_2300',49,'112.46',1), - -(10194,'S32_1268',37,'77.05',3), - -(10194,'S32_3522',39,'61.41',7), - -(10194,'S700_2824',26,'80.92',6), - -(10195,'S12_4473',49,'118.50',6), - -(10195,'S18_2238',27,'139.17',5), - -(10195,'S18_2319',35,'112.91',9), - -(10195,'S18_3232',50,'150.71',10), - -(10195,'S24_1444',44,'54.33',3), - -(10195,'S24_2840',32,'31.82',7), - -(10195,'S24_4048',34,'95.81',2), - -(10195,'S32_2509',32,'51.95',8), - -(10195,'S32_3207',33,'59.03',1), - -(10195,'S50_1392',49,'97.23',4), - -(10196,'S12_1108',47,'203.64',5), - -(10196,'S12_3148',24,'151.08',6), - -(10196,'S12_3891',38,'147.07',4), - -(10196,'S18_3140',49,'127.03',1), - -(10196,'S18_3259',35,'81.68',3), - -(10196,'S18_4027',27,'126.39',8), - -(10196,'S50_1514',46,'56.82',7), - -(10196,'S700_1938',50,'84.88',2), - -(10197,'S10_4757',45,'118.32',6), - -(10197,'S18_3029',46,'83.44',4), - -(10197,'S18_3856',22,'85.75',3), - -(10197,'S18_4522',50,'78.99',14), - -(10197,'S24_2011',41,'109.37',13), - -(10197,'S24_3151',47,'83.20',8), - -(10197,'S24_3816',22,'67.93',5), - -(10197,'S700_1138',23,'60.00',9), - -(10197,'S700_2047',24,'78.75',2), - -(10197,'S700_2610',50,'66.50',7), - -(10197,'S700_3505',27,'100.17',10), - -(10197,'S700_3962',35,'88.39',11), - -(10197,'S72_1253',29,'39.73',1), - -(10197,'S72_3212',42,'48.59',12), - -(10198,'S18_1662',42,'149.81',4), - 
-(10198,'S24_2841',48,'60.97',5), - -(10198,'S24_3420',27,'61.81',6), - -(10198,'S24_3949',43,'65.51',3), - -(10198,'S700_2466',42,'94.73',1), - -(10198,'S700_4002',40,'74.03',2), - -(10199,'S50_1341',29,'37.97',1), - -(10199,'S700_1691',48,'81.29',2), - -(10199,'S700_3167',38,'70.40',3), - -(10200,'S18_2581',28,'74.34',3), - -(10200,'S24_1785',33,'99.57',5), - -(10200,'S24_4278',39,'70.28',4), - -(10200,'S32_1374',35,'80.91',1), - -(10200,'S32_4289',27,'65.35',6), - -(10200,'S700_2834',39,'115.09',2), - -(10201,'S10_1678',22,'82.30',2), - -(10201,'S10_2016',24,'116.56',5), - -(10201,'S10_4698',49,'191.72',4), - -(10201,'S12_2823',25,'126.52',1), - -(10201,'S18_2625',30,'48.46',6), - -(10201,'S24_1578',39,'93.54',3), - -(10201,'S24_2000',25,'66.27',7), - -(10202,'S18_3782',30,'55.33',3), - -(10202,'S18_4721',43,'124.99',2), - -(10202,'S24_2360',50,'56.10',6), - -(10202,'S24_4620',50,'75.18',1), - -(10202,'S32_2206',27,'33.39',4), - -(10202,'S32_4485',31,'81.64',7), - -(10202,'S50_4713',40,'79.73',5), - -(10203,'S12_1099',20,'161.49',8), - -(10203,'S12_3380',20,'111.57',6), - -(10203,'S12_3990',44,'63.84',9), - -(10203,'S12_4675',47,'115.16',5), - -(10203,'S18_1889',45,'73.15',4), - -(10203,'S18_3232',48,'157.49',1), - -(10203,'S18_3278',33,'66.74',11), - -(10203,'S18_3482',32,'127.88',10), - -(10203,'S24_2972',21,'33.23',2), - -(10203,'S24_3371',34,'56.94',7), - -(10203,'S24_3856',47,'140.43',3), - -(10204,'S18_1129',42,'114.65',17), - -(10204,'S18_1589',40,'113.24',13), - -(10204,'S18_1749',33,'153.00',4), - -(10204,'S18_1984',38,'133.72',16), - -(10204,'S18_2248',23,'59.33',3), - -(10204,'S18_2325',26,'119.50',1), - -(10204,'S18_2870',27,'106.92',14), - -(10204,'S18_3685',35,'132.80',15), - -(10204,'S18_4409',29,'83.75',5), - -(10204,'S18_4933',45,'69.84',6), - -(10204,'S24_1046',20,'69.82',10), - -(10204,'S24_1628',45,'46.79',12), - -(10204,'S24_2766',47,'79.06',8), - -(10204,'S24_2887',42,'112.74',7), - -(10204,'S24_3191',40,'84.75',9), - 
-(10204,'S24_3432',48,'104.94',11), - -(10204,'S24_3969',39,'34.88',2), - -(10205,'S18_1342',36,'98.63',2), - -(10205,'S18_1367',48,'45.82',1), - -(10205,'S18_2795',40,'138.38',3), - -(10205,'S24_1937',32,'27.88',5), - -(10205,'S24_2022',24,'36.74',4), - -(10206,'S10_1949',47,'203.59',6), - -(10206,'S12_1666',28,'109.34',3), - -(10206,'S18_1097',34,'115.50',5), - -(10206,'S18_2949',37,'98.27',7), - -(10206,'S18_2957',28,'51.84',9), - -(10206,'S18_3136',30,'102.63',8), - -(10206,'S18_3320',28,'99.21',11), - -(10206,'S18_4668',21,'45.78',4), - -(10206,'S24_4258',33,'95.44',10), - -(10206,'S32_3522',36,'54.94',2), - -(10206,'S700_2824',33,'89.01',1), - -(10207,'S10_4962',31,'125.58',15), - -(10207,'S12_4473',34,'95.99',7), - -(10207,'S18_2238',44,'140.81',6), - -(10207,'S18_2319',43,'109.23',10), - -(10207,'S18_2432',37,'60.77',13), - -(10207,'S18_3232',25,'140.55',11), - -(10207,'S18_4027',40,'143.62',1), - -(10207,'S18_4600',47,'119.87',16), - -(10207,'S24_1444',49,'57.80',4), - -(10207,'S24_2300',46,'127.79',12), - -(10207,'S24_2840',42,'30.76',8), - -(10207,'S24_4048',28,'108.82',3), - -(10207,'S32_1268',49,'84.75',14), - -(10207,'S32_2509',27,'51.95',9), - -(10207,'S32_3207',45,'55.30',2), - -(10207,'S50_1392',28,'106.49',5), - -(10208,'S12_1108',46,'176.63',13), - -(10208,'S12_3148',26,'128.42',14), - -(10208,'S12_3891',20,'152.26',12), - -(10208,'S18_3140',24,'117.47',9), - -(10208,'S18_3259',48,'96.81',11), - -(10208,'S18_4522',45,'72.85',8), - -(10208,'S24_2011',35,'122.89',7), - -(10208,'S24_3151',20,'80.54',2), - -(10208,'S50_1514',30,'57.99',15), - -(10208,'S700_1138',38,'56.67',3), - -(10208,'S700_1938',40,'73.62',10), - -(10208,'S700_2610',46,'63.61',1), - -(10208,'S700_3505',37,'95.16',4), - -(10208,'S700_3962',33,'95.34',5), - -(10208,'S72_3212',42,'48.05',6), - -(10209,'S10_4757',39,'129.20',8), - -(10209,'S18_3029',28,'82.58',6), - -(10209,'S18_3856',20,'97.40',5), - -(10209,'S24_2841',43,'66.45',1), - -(10209,'S24_3420',36,'56.55',2), - 
-(10209,'S24_3816',22,'79.67',7), - -(10209,'S700_2047',33,'90.52',4), - -(10209,'S72_1253',48,'44.20',3), - -(10210,'S10_2016',23,'112.99',2), - -(10210,'S10_4698',34,'189.79',1), - -(10210,'S18_1662',31,'141.92',17), - -(10210,'S18_2581',50,'68.43',7), - -(10210,'S18_2625',40,'51.48',3), - -(10210,'S24_1785',27,'100.67',9), - -(10210,'S24_2000',30,'63.22',4), - -(10210,'S24_3949',29,'56.64',16), - -(10210,'S24_4278',40,'68.10',8), - -(10210,'S32_1374',46,'84.91',5), - -(10210,'S32_4289',39,'57.10',10), - -(10210,'S50_1341',43,'43.20',11), - -(10210,'S700_1691',21,'87.69',12), - -(10210,'S700_2466',26,'93.74',14), - -(10210,'S700_2834',25,'98.48',6), - -(10210,'S700_3167',31,'64.00',13), - -(10210,'S700_4002',42,'60.70',15), - -(10211,'S10_1678',41,'90.92',14), - -(10211,'S12_1099',41,'171.22',2), - -(10211,'S12_2823',36,'126.52',13), - -(10211,'S12_3990',28,'79.80',3), - -(10211,'S18_3278',35,'73.17',5), - -(10211,'S18_3482',28,'138.17',4), - -(10211,'S18_3782',46,'60.30',8), - -(10211,'S18_4721',41,'148.80',7), - -(10211,'S24_1578',25,'109.32',15), - -(10211,'S24_2360',21,'62.33',11), - -(10211,'S24_3371',48,'52.66',1), - -(10211,'S24_4620',22,'80.84',6), - -(10211,'S32_2206',41,'39.83',9), - -(10211,'S32_4485',37,'94.91',12), - -(10211,'S50_4713',40,'70.78',10), - -(10212,'S12_3380',39,'99.82',16), - -(10212,'S12_4675',33,'110.55',15), - -(10212,'S18_1129',29,'117.48',10), - -(10212,'S18_1589',38,'105.77',6), - -(10212,'S18_1889',20,'64.68',14), - -(10212,'S18_1984',41,'133.72',9), - -(10212,'S18_2870',40,'117.48',7), - -(10212,'S18_3232',40,'155.79',11), - -(10212,'S18_3685',45,'115.85',8), - -(10212,'S24_1046',41,'61.73',3), - -(10212,'S24_1628',45,'43.27',5), - -(10212,'S24_2766',45,'81.78',1), - -(10212,'S24_2972',34,'37.38',12), - -(10212,'S24_3191',27,'77.91',2), - -(10212,'S24_3432',46,'100.66',4), - -(10212,'S24_3856',49,'117.96',13), - -(10213,'S18_4409',38,'84.67',1), - -(10213,'S18_4933',25,'58.44',2), - -(10213,'S24_2887',27,'97.48',3), - 
-(10214,'S18_1749',30,'166.60',7), - -(10214,'S18_2248',21,'53.28',6), - -(10214,'S18_2325',27,'125.86',4), - -(10214,'S18_2795',50,'167.06',1), - -(10214,'S24_1937',20,'32.19',3), - -(10214,'S24_2022',49,'39.87',2), - -(10214,'S24_3969',44,'38.57',5), - -(10215,'S10_1949',35,'205.73',3), - -(10215,'S18_1097',46,'100.34',2), - -(10215,'S18_1342',27,'92.47',10), - -(10215,'S18_1367',33,'53.91',9), - -(10215,'S18_2949',49,'97.26',4), - -(10215,'S18_2957',31,'56.21',6), - -(10215,'S18_3136',49,'89.01',5), - -(10215,'S18_3320',41,'84.33',8), - -(10215,'S18_4668',46,'42.76',1), - -(10215,'S24_4258',39,'94.47',7), - -(10216,'S12_1666',43,'133.94',1), - -(10217,'S10_4962',48,'132.97',4), - -(10217,'S18_2432',35,'58.34',2), - -(10217,'S18_4600',38,'118.66',5), - -(10217,'S24_2300',28,'103.51',1), - -(10217,'S32_1268',21,'78.97',3), - -(10217,'S32_3522',39,'56.24',7), - -(10217,'S700_2824',31,'90.02',6), - -(10218,'S18_2319',22,'110.46',1), - -(10218,'S18_3232',34,'152.41',2), - -(10219,'S12_4473',48,'94.80',2), - -(10219,'S18_2238',43,'132.62',1), - -(10219,'S24_2840',21,'31.12',3), - -(10219,'S32_2509',35,'47.62',4), - -(10220,'S12_1108',32,'189.10',2), - -(10220,'S12_3148',30,'151.08',3), - -(10220,'S12_3891',27,'166.10',1), - -(10220,'S18_4027',50,'126.39',5), - -(10220,'S24_1444',26,'48.55',8), - -(10220,'S24_4048',37,'101.72',7), - -(10220,'S32_3207',20,'49.71',6), - -(10220,'S50_1392',37,'92.60',9), - -(10220,'S50_1514',30,'56.82',4), - -(10221,'S18_3140',33,'133.86',3), - -(10221,'S18_3259',23,'89.75',5), - -(10221,'S18_4522',39,'84.26',2), - -(10221,'S24_2011',49,'113.06',1), - -(10221,'S700_1938',23,'69.29',4), - -(10222,'S10_4757',49,'133.28',12), - -(10222,'S18_1662',49,'137.19',4), - -(10222,'S18_3029',49,'79.14',10), - -(10222,'S18_3856',45,'88.93',9), - -(10222,'S24_2841',32,'56.86',5), - -(10222,'S24_3151',47,'74.35',14), - -(10222,'S24_3420',43,'61.15',6), - -(10222,'S24_3816',46,'77.99',11), - -(10222,'S24_3949',48,'55.27',3), - 
-(10222,'S700_1138',31,'58.67',15), - -(10222,'S700_2047',26,'80.56',8), - -(10222,'S700_2466',37,'90.75',1), - -(10222,'S700_2610',36,'69.39',13), - -(10222,'S700_3505',38,'84.14',16), - -(10222,'S700_3962',31,'81.43',17), - -(10222,'S700_4002',43,'66.63',2), - -(10222,'S72_1253',31,'45.19',7), - -(10222,'S72_3212',36,'48.59',18), - -(10223,'S10_1678',37,'80.39',1), - -(10223,'S10_2016',47,'110.61',4), - -(10223,'S10_4698',49,'189.79',3), - -(10223,'S18_2581',47,'67.58',9), - -(10223,'S18_2625',28,'58.75',5), - -(10223,'S24_1578',32,'104.81',2), - -(10223,'S24_1785',34,'87.54',11), - -(10223,'S24_2000',38,'60.94',6), - -(10223,'S24_4278',23,'68.10',10), - -(10223,'S32_1374',21,'90.90',7), - -(10223,'S32_4289',20,'66.73',12), - -(10223,'S50_1341',41,'41.02',13), - -(10223,'S700_1691',25,'84.03',14), - -(10223,'S700_2834',29,'113.90',8), - -(10223,'S700_3167',26,'79.20',15), - -(10224,'S12_2823',43,'141.58',6), - -(10224,'S18_3782',38,'57.20',1), - -(10224,'S24_2360',37,'60.26',4), - -(10224,'S32_2206',43,'37.01',2), - -(10224,'S32_4485',30,'94.91',5), - -(10224,'S50_4713',50,'81.36',3), - -(10225,'S12_1099',27,'157.60',9), - -(10225,'S12_3380',25,'101.00',7), - -(10225,'S12_3990',37,'64.64',10), - -(10225,'S12_4675',21,'100.19',6), - -(10225,'S18_1129',32,'116.06',1), - -(10225,'S18_1889',47,'71.61',5), - -(10225,'S18_3232',43,'162.57',2), - -(10225,'S18_3278',37,'69.96',12), - -(10225,'S18_3482',27,'119.06',11), - -(10225,'S18_4721',35,'135.41',14), - -(10225,'S24_2972',42,'34.74',3), - -(10225,'S24_3371',24,'51.43',8), - -(10225,'S24_3856',40,'130.60',4), - -(10225,'S24_4620',46,'77.61',13), - -(10226,'S18_1589',38,'108.26',4), - -(10226,'S18_1984',24,'129.45',7), - -(10226,'S18_2870',24,'125.40',5), - -(10226,'S18_3685',46,'122.91',6), - -(10226,'S24_1046',21,'65.41',1), - -(10226,'S24_1628',36,'47.79',3), - -(10226,'S24_3432',48,'95.30',2), - -(10227,'S18_1342',25,'85.27',3), - -(10227,'S18_1367',31,'50.14',2), - -(10227,'S18_1749',26,'136.00',10), - 
-(10227,'S18_2248',28,'59.93',9), - -(10227,'S18_2325',46,'118.23',7), - -(10227,'S18_2795',29,'146.81',4), - -(10227,'S18_3320',33,'99.21',1), - -(10227,'S18_4409',34,'87.43',11), - -(10227,'S18_4933',37,'70.56',12), - -(10227,'S24_1937',42,'27.22',6), - -(10227,'S24_2022',24,'39.42',5), - -(10227,'S24_2766',47,'84.51',14), - -(10227,'S24_2887',33,'102.17',13), - -(10227,'S24_3191',40,'78.76',15), - -(10227,'S24_3969',27,'34.88',8), - -(10228,'S10_1949',29,'214.30',2), - -(10228,'S18_1097',32,'100.34',1), - -(10228,'S18_2949',24,'101.31',3), - -(10228,'S18_2957',45,'57.46',5), - -(10228,'S18_3136',31,'100.53',4), - -(10228,'S24_4258',33,'84.73',6), - -(10229,'S10_4962',50,'138.88',9), - -(10229,'S12_1666',25,'110.70',13), - -(10229,'S12_4473',36,'95.99',1), - -(10229,'S18_2319',26,'104.32',4), - -(10229,'S18_2432',28,'53.48',7), - -(10229,'S18_3232',22,'157.49',5), - -(10229,'S18_4600',41,'119.87',10), - -(10229,'S18_4668',39,'43.77',14), - -(10229,'S24_2300',48,'115.01',6), - -(10229,'S24_2840',33,'34.65',2), - -(10229,'S32_1268',25,'78.97',8), - -(10229,'S32_2509',23,'49.78',3), - -(10229,'S32_3522',30,'52.36',12), - -(10229,'S700_2824',50,'91.04',11), - -(10230,'S12_3148',43,'128.42',1), - -(10230,'S18_2238',49,'153.91',8), - -(10230,'S18_4027',42,'142.18',3), - -(10230,'S24_1444',36,'47.40',6), - -(10230,'S24_4048',45,'99.36',5), - -(10230,'S32_3207',46,'59.03',4), - -(10230,'S50_1392',34,'100.70',7), - -(10230,'S50_1514',43,'57.41',2), - -(10231,'S12_1108',42,'193.25',2), - -(10231,'S12_3891',49,'147.07',1), - -(10232,'S18_3140',22,'133.86',6), - -(10232,'S18_3259',48,'97.81',8), - -(10232,'S18_4522',23,'78.12',5), - -(10232,'S24_2011',46,'113.06',4), - -(10232,'S700_1938',26,'84.88',7), - -(10232,'S700_3505',48,'86.15',1), - -(10232,'S700_3962',35,'81.43',2), - -(10232,'S72_3212',24,'48.59',3), - -(10233,'S24_3151',40,'70.81',2), - -(10233,'S700_1138',36,'66.00',3), - -(10233,'S700_2610',29,'67.94',1), - -(10234,'S10_4757',48,'118.32',9), - 
-(10234,'S18_1662',50,'146.65',1), - -(10234,'S18_3029',48,'84.30',7), - -(10234,'S18_3856',39,'85.75',6), - -(10234,'S24_2841',44,'67.14',2), - -(10234,'S24_3420',25,'65.09',3), - -(10234,'S24_3816',31,'78.83',8), - -(10234,'S700_2047',29,'83.28',5), - -(10234,'S72_1253',40,'45.69',4), - -(10235,'S18_2581',24,'81.95',3), - -(10235,'S24_1785',23,'89.72',5), - -(10235,'S24_3949',33,'55.27',12), - -(10235,'S24_4278',40,'63.03',4), - -(10235,'S32_1374',41,'90.90',1), - -(10235,'S32_4289',34,'66.73',6), - -(10235,'S50_1341',41,'37.09',7), - -(10235,'S700_1691',25,'88.60',8), - -(10235,'S700_2466',38,'92.74',10), - -(10235,'S700_2834',25,'116.28',2), - -(10235,'S700_3167',32,'73.60',9), - -(10235,'S700_4002',34,'70.33',11), - -(10236,'S10_2016',22,'105.86',1), - -(10236,'S18_2625',23,'52.70',2), - -(10236,'S24_2000',36,'65.51',3), - -(10237,'S10_1678',23,'91.87',7), - -(10237,'S10_4698',39,'158.80',9), - -(10237,'S12_2823',32,'129.53',6), - -(10237,'S18_3782',26,'49.74',1), - -(10237,'S24_1578',20,'109.32',8), - -(10237,'S24_2360',26,'62.33',4), - -(10237,'S32_2206',26,'35.00',2), - -(10237,'S32_4485',27,'94.91',5), - -(10237,'S50_4713',20,'78.92',3), - -(10238,'S12_1099',28,'161.49',3), - -(10238,'S12_3380',29,'104.52',1), - -(10238,'S12_3990',20,'73.42',4), - -(10238,'S18_3278',41,'68.35',6), - -(10238,'S18_3482',49,'144.05',5), - -(10238,'S18_4721',44,'120.53',8), - -(10238,'S24_3371',47,'53.88',2), - -(10238,'S24_4620',22,'67.91',7), - -(10239,'S12_4675',21,'100.19',5), - -(10239,'S18_1889',46,'70.07',4), - -(10239,'S18_3232',47,'135.47',1), - -(10239,'S24_2972',20,'32.47',2), - -(10239,'S24_3856',29,'133.41',3), - -(10240,'S18_1129',41,'125.97',3), - -(10240,'S18_1984',37,'136.56',2), - -(10240,'S18_3685',37,'134.22',1), - -(10241,'S18_1589',21,'119.46',11), - -(10241,'S18_1749',41,'153.00',2), - -(10241,'S18_2248',33,'55.70',1), - -(10241,'S18_2870',44,'126.72',12), - -(10241,'S18_4409',42,'77.31',3), - -(10241,'S18_4933',30,'62.72',4), - 
-(10241,'S24_1046',22,'72.02',8), - -(10241,'S24_1628',21,'47.29',10), - -(10241,'S24_2766',47,'89.05',6), - -(10241,'S24_2887',28,'117.44',5), - -(10241,'S24_3191',26,'69.34',7), - -(10241,'S24_3432',27,'107.08',9), - -(10242,'S24_3969',46,'36.52',1), - -(10243,'S18_2325',47,'111.87',2), - -(10243,'S24_1937',33,'30.87',1), - -(10244,'S18_1342',40,'99.66',7), - -(10244,'S18_1367',20,'48.52',6), - -(10244,'S18_2795',43,'141.75',8), - -(10244,'S18_2949',30,'87.13',1), - -(10244,'S18_2957',24,'54.96',3), - -(10244,'S18_3136',29,'85.87',2), - -(10244,'S18_3320',36,'87.30',5), - -(10244,'S24_2022',39,'42.11',9), - -(10244,'S24_4258',40,'97.39',4), - -(10245,'S10_1949',34,'195.01',9), - -(10245,'S10_4962',28,'147.74',2), - -(10245,'S12_1666',38,'120.27',6), - -(10245,'S18_1097',29,'114.34',8), - -(10245,'S18_4600',21,'111.39',3), - -(10245,'S18_4668',45,'48.80',7), - -(10245,'S32_1268',37,'81.86',1), - -(10245,'S32_3522',44,'54.94',5), - -(10245,'S700_2824',44,'81.93',4), - -(10246,'S12_4473',46,'99.54',5), - -(10246,'S18_2238',40,'144.08',4), - -(10246,'S18_2319',22,'100.64',8), - -(10246,'S18_2432',30,'57.73',11), - -(10246,'S18_3232',36,'145.63',9), - -(10246,'S24_1444',44,'46.24',2), - -(10246,'S24_2300',29,'118.84',10), - -(10246,'S24_2840',49,'34.65',6), - -(10246,'S24_4048',46,'100.54',1), - -(10246,'S32_2509',35,'45.45',7), - -(10246,'S50_1392',22,'113.44',3), - -(10247,'S12_1108',44,'195.33',2), - -(10247,'S12_3148',25,'140.50',3), - -(10247,'S12_3891',27,'167.83',1), - -(10247,'S18_4027',48,'143.62',5), - -(10247,'S32_3207',40,'58.41',6), - -(10247,'S50_1514',49,'51.55',4), - -(10248,'S10_4757',20,'126.48',3), - -(10248,'S18_3029',21,'80.86',1), - -(10248,'S18_3140',32,'133.86',12), - -(10248,'S18_3259',42,'95.80',14), - -(10248,'S18_4522',42,'87.77',11), - -(10248,'S24_2011',48,'122.89',10), - -(10248,'S24_3151',30,'85.85',5), - -(10248,'S24_3816',23,'83.02',2), - -(10248,'S700_1138',36,'66.00',6), - -(10248,'S700_1938',40,'81.41',13), - 
-(10248,'S700_2610',32,'69.39',4), - -(10248,'S700_3505',30,'84.14',7), - -(10248,'S700_3962',35,'92.36',8), - -(10248,'S72_3212',23,'53.51',9), - -(10249,'S18_3856',46,'88.93',5), - -(10249,'S24_2841',20,'54.81',1), - -(10249,'S24_3420',25,'65.75',2), - -(10249,'S700_2047',40,'85.99',4), - -(10249,'S72_1253',32,'49.16',3), - -(10250,'S18_1662',45,'148.23',14), - -(10250,'S18_2581',27,'84.48',4), - -(10250,'S24_1785',31,'95.20',6), - -(10250,'S24_2000',32,'63.22',1), - -(10250,'S24_3949',40,'61.42',13), - -(10250,'S24_4278',37,'72.45',5), - -(10250,'S32_1374',31,'99.89',2), - -(10250,'S32_4289',50,'62.60',7), - -(10250,'S50_1341',36,'36.66',8), - -(10250,'S700_1691',31,'91.34',9), - -(10250,'S700_2466',35,'90.75',11), - -(10250,'S700_2834',44,'98.48',3), - -(10250,'S700_3167',44,'76.00',10), - -(10250,'S700_4002',38,'65.89',12), - -(10251,'S10_1678',59,'93.79',2), - -(10251,'S10_2016',44,'115.37',5), - -(10251,'S10_4698',43,'172.36',4), - -(10251,'S12_2823',46,'129.53',1), - -(10251,'S18_2625',44,'58.15',6), - -(10251,'S24_1578',50,'91.29',3), - -(10252,'S18_3278',20,'74.78',2), - -(10252,'S18_3482',41,'145.52',1), - -(10252,'S18_3782',31,'50.36',5), - -(10252,'S18_4721',26,'127.97',4), - -(10252,'S24_2360',47,'63.03',8), - -(10252,'S24_4620',38,'69.52',3), - -(10252,'S32_2206',36,'36.21',6), - -(10252,'S32_4485',25,'93.89',9), - -(10252,'S50_4713',48,'72.41',7), - -(10253,'S12_1099',24,'157.60',13), - -(10253,'S12_3380',22,'102.17',11), - -(10253,'S12_3990',25,'67.03',14), - -(10253,'S12_4675',41,'109.40',10), - -(10253,'S18_1129',26,'130.22',5), - -(10253,'S18_1589',24,'103.29',1), - -(10253,'S18_1889',23,'67.76',9), - -(10253,'S18_1984',33,'130.87',4), - -(10253,'S18_2870',37,'114.84',2), - -(10253,'S18_3232',40,'145.63',6), - -(10253,'S18_3685',31,'139.87',3), - -(10253,'S24_2972',40,'34.74',7), - -(10253,'S24_3371',24,'50.82',12), - -(10253,'S24_3856',39,'115.15',8), - -(10254,'S18_1749',49,'137.70',5), - -(10254,'S18_2248',36,'55.09',4), - 
-(10254,'S18_2325',41,'102.98',2), - -(10254,'S18_4409',34,'80.99',6), - -(10254,'S18_4933',30,'59.87',7), - -(10254,'S24_1046',34,'66.88',11), - -(10254,'S24_1628',32,'43.27',13), - -(10254,'S24_1937',38,'28.88',1), - -(10254,'S24_2766',31,'85.42',9), - -(10254,'S24_2887',33,'111.57',8), - -(10254,'S24_3191',42,'69.34',10), - -(10254,'S24_3432',49,'101.73',12), - -(10254,'S24_3969',20,'39.80',3), - -(10255,'S18_2795',24,'135.00',1), - -(10255,'S24_2022',37,'37.63',2), - -(10256,'S18_1342',34,'93.49',2), - -(10256,'S18_1367',29,'52.83',1), - -(10257,'S18_2949',50,'92.19',1), - -(10257,'S18_2957',49,'59.34',3), - -(10257,'S18_3136',37,'83.78',2), - -(10257,'S18_3320',26,'91.27',5), - -(10257,'S24_4258',46,'81.81',4), - -(10258,'S10_1949',32,'177.87',6), - -(10258,'S12_1666',41,'133.94',3), - -(10258,'S18_1097',41,'113.17',5), - -(10258,'S18_4668',21,'49.81',4), - -(10258,'S32_3522',20,'62.70',2), - -(10258,'S700_2824',45,'86.99',1), - -(10259,'S10_4962',26,'121.15',12), - -(10259,'S12_4473',46,'117.32',4), - -(10259,'S18_2238',30,'134.26',3), - -(10259,'S18_2319',34,'120.28',7), - -(10259,'S18_2432',30,'59.55',10), - -(10259,'S18_3232',27,'152.41',8), - -(10259,'S18_4600',41,'107.76',13), - -(10259,'S24_1444',28,'46.82',1), - -(10259,'S24_2300',47,'121.40',9), - -(10259,'S24_2840',31,'31.47',5), - -(10259,'S32_1268',45,'95.35',11), - -(10259,'S32_2509',40,'45.99',6), - -(10259,'S50_1392',29,'105.33',2), - -(10260,'S12_1108',46,'180.79',5), - -(10260,'S12_3148',30,'140.50',6), - -(10260,'S12_3891',44,'169.56',4), - -(10260,'S18_3140',32,'121.57',1), - -(10260,'S18_3259',29,'92.77',3), - -(10260,'S18_4027',23,'137.88',8), - -(10260,'S24_4048',23,'117.10',10), - -(10260,'S32_3207',27,'55.30',9), - -(10260,'S50_1514',21,'56.24',7), - -(10260,'S700_1938',33,'80.55',2), - -(10261,'S10_4757',27,'116.96',1), - -(10261,'S18_4522',20,'80.75',9), - -(10261,'S24_2011',36,'105.69',8), - -(10261,'S24_3151',22,'79.66',3), - -(10261,'S700_1138',34,'64.00',4), - 
-(10261,'S700_2610',44,'58.55',2), - -(10261,'S700_3505',25,'89.15',5), - -(10261,'S700_3962',50,'88.39',6), - -(10261,'S72_3212',29,'43.68',7), - -(10262,'S18_1662',49,'157.69',9), - -(10262,'S18_3029',32,'81.72',15), - -(10262,'S18_3856',34,'85.75',14), - -(10262,'S24_1785',34,'98.48',1), - -(10262,'S24_2841',24,'63.71',10), - -(10262,'S24_3420',46,'65.75',11), - -(10262,'S24_3816',49,'82.18',16), - -(10262,'S24_3949',48,'58.69',8), - -(10262,'S32_4289',40,'63.97',2), - -(10262,'S50_1341',49,'35.78',3), - -(10262,'S700_1691',40,'87.69',4), - -(10262,'S700_2047',44,'83.28',13), - -(10262,'S700_2466',33,'81.77',6), - -(10262,'S700_3167',27,'64.80',5), - -(10262,'S700_4002',35,'64.41',7), - -(10262,'S72_1253',21,'41.71',12), - -(10263,'S10_1678',34,'89.00',2), - -(10263,'S10_2016',40,'107.05',5), - -(10263,'S10_4698',41,'193.66',4), - -(10263,'S12_2823',48,'123.51',1), - -(10263,'S18_2581',33,'67.58',10), - -(10263,'S18_2625',34,'50.27',6), - -(10263,'S24_1578',42,'109.32',3), - -(10263,'S24_2000',37,'67.03',7), - -(10263,'S24_4278',24,'59.41',11), - -(10263,'S32_1374',31,'93.90',8), - -(10263,'S700_2834',47,'117.46',9), - -(10264,'S18_3782',48,'58.44',3), - -(10264,'S18_4721',20,'124.99',2), - -(10264,'S24_2360',37,'61.64',6), - -(10264,'S24_4620',47,'75.18',1), - -(10264,'S32_2206',20,'39.02',4), - -(10264,'S32_4485',34,'100.01',7), - -(10264,'S50_4713',47,'67.53',5), - -(10265,'S18_3278',45,'74.78',2), - -(10265,'S18_3482',49,'123.47',1), - -(10266,'S12_1099',44,'188.73',14), - -(10266,'S12_3380',22,'110.39',12), - -(10266,'S12_3990',35,'67.83',15), - -(10266,'S12_4675',40,'112.86',11), - -(10266,'S18_1129',21,'131.63',6), - -(10266,'S18_1589',36,'99.55',2), - -(10266,'S18_1889',33,'77.00',10), - -(10266,'S18_1984',49,'139.41',5), - -(10266,'S18_2870',20,'113.52',3), - -(10266,'S18_3232',29,'137.17',7), - -(10266,'S18_3685',33,'127.15',4), - -(10266,'S24_1628',28,'40.25',1), - -(10266,'S24_2972',34,'35.12',8), - -(10266,'S24_3371',47,'56.33',13), - 
-(10266,'S24_3856',24,'119.37',9), - -(10267,'S18_4933',36,'71.27',1), - -(10267,'S24_1046',40,'72.02',5), - -(10267,'S24_2766',38,'76.33',3), - -(10267,'S24_2887',43,'93.95',2), - -(10267,'S24_3191',44,'83.90',4), - -(10267,'S24_3432',43,'98.51',6), - -(10268,'S18_1342',49,'93.49',3), - -(10268,'S18_1367',26,'45.82',2), - -(10268,'S18_1749',34,'164.90',10), - -(10268,'S18_2248',31,'60.54',9), - -(10268,'S18_2325',50,'124.59',7), - -(10268,'S18_2795',35,'148.50',4), - -(10268,'S18_3320',39,'96.23',1), - -(10268,'S18_4409',35,'84.67',11), - -(10268,'S24_1937',33,'31.86',6), - -(10268,'S24_2022',40,'36.29',5), - -(10268,'S24_3969',30,'37.75',8), - -(10269,'S18_2957',32,'57.46',1), - -(10269,'S24_4258',48,'95.44',2), - -(10270,'S10_1949',21,'171.44',9), - -(10270,'S10_4962',32,'124.10',2), - -(10270,'S12_1666',28,'135.30',6), - -(10270,'S18_1097',43,'94.50',8), - -(10270,'S18_2949',31,'81.05',10), - -(10270,'S18_3136',38,'85.87',11), - -(10270,'S18_4600',38,'107.76',3), - -(10270,'S18_4668',44,'40.25',7), - -(10270,'S32_1268',32,'93.42',1), - -(10270,'S32_3522',21,'52.36',5), - -(10270,'S700_2824',46,'101.15',4), - -(10271,'S12_4473',31,'99.54',5), - -(10271,'S18_2238',50,'147.36',4), - -(10271,'S18_2319',50,'121.50',8), - -(10271,'S18_2432',25,'59.55',11), - -(10271,'S18_3232',20,'169.34',9), - -(10271,'S24_1444',45,'49.71',2), - -(10271,'S24_2300',43,'122.68',10), - -(10271,'S24_2840',38,'28.64',6), - -(10271,'S24_4048',22,'110.00',1), - -(10271,'S32_2509',35,'51.95',7), - -(10271,'S50_1392',34,'93.76',3), - -(10272,'S12_1108',35,'187.02',2), - -(10272,'S12_3148',27,'123.89',3), - -(10272,'S12_3891',39,'148.80',1), - -(10272,'S18_4027',25,'126.39',5), - -(10272,'S32_3207',45,'56.55',6), - -(10272,'S50_1514',43,'53.89',4), - -(10273,'S10_4757',30,'136.00',4), - -(10273,'S18_3029',34,'84.30',2), - -(10273,'S18_3140',40,'117.47',13), - -(10273,'S18_3259',47,'87.73',15), - -(10273,'S18_3856',50,'105.87',1), - -(10273,'S18_4522',33,'72.85',12), - 
-(10273,'S24_2011',22,'103.23',11), - -(10273,'S24_3151',27,'84.08',6), - -(10273,'S24_3816',48,'83.86',3), - -(10273,'S700_1138',21,'66.00',7), - -(10273,'S700_1938',21,'77.95',14), - -(10273,'S700_2610',42,'57.82',5), - -(10273,'S700_3505',40,'91.15',8), - -(10273,'S700_3962',26,'89.38',9), - -(10273,'S72_3212',37,'51.32',10), - -(10274,'S18_1662',41,'129.31',1), - -(10274,'S24_2841',40,'56.86',2), - -(10274,'S24_3420',24,'65.09',3), - -(10274,'S700_2047',24,'75.13',5), - -(10274,'S72_1253',32,'49.66',4), - -(10275,'S10_1678',45,'81.35',1), - -(10275,'S10_2016',22,'115.37',4), - -(10275,'S10_4698',36,'154.93',3), - -(10275,'S18_2581',35,'70.12',9), - -(10275,'S18_2625',37,'52.09',5), - -(10275,'S24_1578',21,'105.94',2), - -(10275,'S24_1785',25,'97.38',11), - -(10275,'S24_2000',30,'61.70',6), - -(10275,'S24_3949',41,'58.00',18), - -(10275,'S24_4278',27,'67.38',10), - -(10275,'S32_1374',23,'89.90',7), - -(10275,'S32_4289',28,'58.47',12), - -(10275,'S50_1341',38,'40.15',13), - -(10275,'S700_1691',32,'85.86',14), - -(10275,'S700_2466',39,'82.77',16), - -(10275,'S700_2834',48,'102.04',8), - -(10275,'S700_3167',43,'72.00',15), - -(10275,'S700_4002',31,'59.96',17), - -(10276,'S12_1099',50,'184.84',3), - -(10276,'S12_2823',43,'150.62',14), - -(10276,'S12_3380',47,'104.52',1), - -(10276,'S12_3990',38,'67.83',4), - -(10276,'S18_3278',38,'78.00',6), - -(10276,'S18_3482',30,'139.64',5), - -(10276,'S18_3782',33,'54.71',9), - -(10276,'S18_4721',48,'120.53',8), - -(10276,'S24_2360',46,'61.64',12), - -(10276,'S24_3371',20,'58.17',2), - -(10276,'S24_4620',48,'67.10',7), - -(10276,'S32_2206',27,'35.40',10), - -(10276,'S32_4485',38,'94.91',13), - -(10276,'S50_4713',21,'67.53',11), - -(10277,'S12_4675',28,'93.28',1), - -(10278,'S18_1129',34,'114.65',6), - -(10278,'S18_1589',23,'107.02',2), - -(10278,'S18_1889',29,'73.15',10), - -(10278,'S18_1984',29,'118.07',5), - -(10278,'S18_2870',39,'117.48',3), - -(10278,'S18_3232',42,'167.65',7), - -(10278,'S18_3685',31,'114.44',4), - 
-(10278,'S24_1628',35,'48.80',1), - -(10278,'S24_2972',31,'37.38',8), - -(10278,'S24_3856',25,'136.22',9), - -(10279,'S18_4933',26,'68.42',1), - -(10279,'S24_1046',32,'68.35',5), - -(10279,'S24_2766',49,'76.33',3), - -(10279,'S24_2887',48,'106.87',2), - -(10279,'S24_3191',33,'78.76',4), - -(10279,'S24_3432',48,'95.30',6), - -(10280,'S10_1949',34,'205.73',2), - -(10280,'S18_1097',24,'98.00',1), - -(10280,'S18_1342',50,'87.33',9), - -(10280,'S18_1367',27,'47.44',8), - -(10280,'S18_1749',26,'161.50',16), - -(10280,'S18_2248',25,'53.28',15), - -(10280,'S18_2325',37,'109.33',13), - -(10280,'S18_2795',22,'158.63',10), - -(10280,'S18_2949',46,'82.06',3), - -(10280,'S18_2957',43,'54.34',5), - -(10280,'S18_3136',29,'102.63',4), - -(10280,'S18_3320',34,'99.21',7), - -(10280,'S18_4409',35,'77.31',17), - -(10280,'S24_1937',20,'29.87',12), - -(10280,'S24_2022',45,'36.29',11), - -(10280,'S24_3969',33,'35.29',14), - -(10280,'S24_4258',21,'79.86',6), - -(10281,'S10_4962',44,'132.97',9), - -(10281,'S12_1666',25,'127.10',13), - -(10281,'S12_4473',41,'98.36',1), - -(10281,'S18_2319',48,'114.14',4), - -(10281,'S18_2432',29,'56.52',7), - -(10281,'S18_3232',25,'135.47',5), - -(10281,'S18_4600',25,'96.86',10), - -(10281,'S18_4668',44,'42.76',14), - -(10281,'S24_2300',25,'112.46',6), - -(10281,'S24_2840',20,'33.95',2), - -(10281,'S32_1268',29,'80.90',8), - -(10281,'S32_2509',31,'44.91',3), - -(10281,'S32_3522',36,'59.47',12), - -(10281,'S700_2824',27,'89.01',11), - -(10282,'S12_1108',41,'176.63',5), - -(10282,'S12_3148',27,'142.02',6), - -(10282,'S12_3891',24,'169.56',4), - -(10282,'S18_2238',23,'147.36',13), - -(10282,'S18_3140',43,'122.93',1), - -(10282,'S18_3259',36,'88.74',3), - -(10282,'S18_4027',31,'132.13',8), - -(10282,'S24_1444',29,'49.71',11), - -(10282,'S24_4048',39,'96.99',10), - -(10282,'S32_3207',36,'51.58',9), - -(10282,'S50_1392',38,'114.59',12), - -(10282,'S50_1514',37,'56.24',7), - -(10282,'S700_1938',43,'77.95',2), - -(10283,'S10_4757',25,'130.56',6), - 
-(10283,'S18_3029',21,'78.28',4), - -(10283,'S18_3856',46,'100.58',3), - -(10283,'S18_4522',34,'71.97',14), - -(10283,'S24_2011',42,'99.54',13), - -(10283,'S24_3151',34,'80.54',8), - -(10283,'S24_3816',33,'77.15',5), - -(10283,'S700_1138',45,'62.00',9), - -(10283,'S700_2047',20,'74.23',2), - -(10283,'S700_2610',47,'68.67',7), - -(10283,'S700_3505',22,'88.15',10), - -(10283,'S700_3962',38,'85.41',11), - -(10283,'S72_1253',43,'41.22',1), - -(10283,'S72_3212',33,'49.14',12), - -(10284,'S18_1662',45,'137.19',11), - -(10284,'S18_2581',31,'68.43',1), - -(10284,'S24_1785',22,'101.76',3), - -(10284,'S24_2841',30,'65.08',12), - -(10284,'S24_3420',39,'59.83',13), - -(10284,'S24_3949',21,'65.51',10), - -(10284,'S24_4278',21,'66.65',2), - -(10284,'S32_4289',50,'60.54',4), - -(10284,'S50_1341',33,'35.78',5), - -(10284,'S700_1691',24,'87.69',6), - -(10284,'S700_2466',45,'95.73',8), - -(10284,'S700_3167',25,'68.00',7), - -(10284,'S700_4002',32,'73.29',9), - -(10285,'S10_1678',36,'95.70',6), - -(10285,'S10_2016',47,'110.61',9), - -(10285,'S10_4698',27,'166.55',8), - -(10285,'S12_2823',49,'131.04',5), - -(10285,'S18_2625',20,'50.88',10), - -(10285,'S24_1578',34,'91.29',7), - -(10285,'S24_2000',39,'61.70',11), - -(10285,'S24_2360',38,'64.41',3), - -(10285,'S32_1374',37,'82.91',12), - -(10285,'S32_2206',37,'36.61',1), - -(10285,'S32_4485',26,'100.01',4), - -(10285,'S50_4713',39,'76.48',2), - -(10285,'S700_2834',45,'102.04',13), - -(10286,'S18_3782',38,'51.60',1), - -(10287,'S12_1099',21,'190.68',12), - -(10287,'S12_3380',45,'117.44',10), - -(10287,'S12_3990',41,'74.21',13), - -(10287,'S12_4675',23,'107.10',9), - -(10287,'S18_1129',41,'113.23',4), - -(10287,'S18_1889',44,'61.60',8), - -(10287,'S18_1984',24,'123.76',3), - -(10287,'S18_2870',44,'114.84',1), - -(10287,'S18_3232',36,'137.17',5), - -(10287,'S18_3278',43,'68.35',15), - -(10287,'S18_3482',40,'127.88',14), - -(10287,'S18_3685',27,'139.87',2), - -(10287,'S18_4721',34,'119.04',17), - -(10287,'S24_2972',36,'31.34',6), - 
-(10287,'S24_3371',20,'58.17',11), - -(10287,'S24_3856',36,'137.62',7), - -(10287,'S24_4620',40,'79.22',16), - -(10288,'S18_1589',20,'120.71',14), - -(10288,'S18_1749',32,'168.30',5), - -(10288,'S18_2248',28,'50.25',4), - -(10288,'S18_2325',31,'102.98',2), - -(10288,'S18_4409',35,'90.19',6), - -(10288,'S18_4933',23,'57.02',7), - -(10288,'S24_1046',36,'66.88',11), - -(10288,'S24_1628',50,'49.30',13), - -(10288,'S24_1937',29,'32.19',1), - -(10288,'S24_2766',35,'81.78',9), - -(10288,'S24_2887',48,'109.22',8), - -(10288,'S24_3191',34,'76.19',10), - -(10288,'S24_3432',41,'101.73',12), - -(10288,'S24_3969',33,'37.75',3), - -(10289,'S18_1342',38,'92.47',2), - -(10289,'S18_1367',24,'44.75',1), - -(10289,'S18_2795',43,'141.75',3), - -(10289,'S24_2022',45,'41.22',4), - -(10290,'S18_3320',26,'80.36',2), - -(10290,'S24_4258',45,'83.76',1), - -(10291,'S10_1949',37,'210.01',11), - -(10291,'S10_4962',30,'141.83',4), - -(10291,'S12_1666',41,'123.00',8), - -(10291,'S18_1097',41,'96.84',10), - -(10291,'S18_2432',26,'52.26',2), - -(10291,'S18_2949',47,'99.28',12), - -(10291,'S18_2957',37,'56.21',14), - -(10291,'S18_3136',23,'93.20',13), - -(10291,'S18_4600',48,'96.86',5), - -(10291,'S18_4668',29,'45.28',9), - -(10291,'S24_2300',48,'109.90',1), - -(10291,'S32_1268',26,'82.83',3), - -(10291,'S32_3522',32,'53.00',7), - -(10291,'S700_2824',28,'86.99',6), - -(10292,'S12_4473',21,'94.80',8), - -(10292,'S18_2238',26,'140.81',7), - -(10292,'S18_2319',41,'103.09',11), - -(10292,'S18_3232',21,'147.33',12), - -(10292,'S18_4027',44,'114.90',2), - -(10292,'S24_1444',40,'48.55',5), - -(10292,'S24_2840',39,'34.30',9), - -(10292,'S24_4048',27,'113.55',4), - -(10292,'S32_2509',50,'54.11',10), - -(10292,'S32_3207',31,'59.65',3), - -(10292,'S50_1392',41,'113.44',6), - -(10292,'S50_1514',35,'49.79',1), - -(10293,'S12_1108',46,'187.02',8), - -(10293,'S12_3148',24,'129.93',9), - -(10293,'S12_3891',45,'171.29',7), - -(10293,'S18_3140',24,'110.64',4), - -(10293,'S18_3259',22,'91.76',6), - 
-(10293,'S18_4522',49,'72.85',3), - -(10293,'S24_2011',21,'111.83',2), - -(10293,'S700_1938',29,'77.95',5), - -(10293,'S72_3212',32,'51.32',1), - -(10294,'S700_3962',45,'98.32',1), - -(10295,'S10_4757',24,'136.00',1), - -(10295,'S24_3151',46,'84.08',3), - -(10295,'S700_1138',26,'62.00',4), - -(10295,'S700_2610',44,'71.56',2), - -(10295,'S700_3505',34,'93.16',5), - -(10296,'S18_1662',36,'146.65',7), - -(10296,'S18_3029',21,'69.68',13), - -(10296,'S18_3856',22,'105.87',12), - -(10296,'S24_2841',21,'60.97',8), - -(10296,'S24_3420',31,'63.78',9), - -(10296,'S24_3816',22,'83.02',14), - -(10296,'S24_3949',32,'63.46',6), - -(10296,'S50_1341',26,'41.02',1), - -(10296,'S700_1691',42,'75.81',2), - -(10296,'S700_2047',34,'89.61',11), - -(10296,'S700_2466',24,'96.73',4), - -(10296,'S700_3167',22,'74.40',3), - -(10296,'S700_4002',47,'61.44',5), - -(10296,'S72_1253',21,'46.68',10), - -(10297,'S18_2581',25,'81.95',4), - -(10297,'S24_1785',32,'107.23',6), - -(10297,'S24_2000',32,'70.08',1), - -(10297,'S24_4278',23,'71.73',5), - -(10297,'S32_1374',26,'88.90',2), - -(10297,'S32_4289',28,'63.29',7), - -(10297,'S700_2834',35,'111.53',3), - -(10298,'S10_2016',39,'105.86',1), - -(10298,'S18_2625',32,'60.57',2), - -(10299,'S10_1678',23,'76.56',9), - -(10299,'S10_4698',29,'164.61',11), - -(10299,'S12_2823',24,'123.51',8), - -(10299,'S18_3782',39,'62.17',3), - -(10299,'S18_4721',49,'119.04',2), - -(10299,'S24_1578',47,'107.07',10), - -(10299,'S24_2360',33,'58.87',6), - -(10299,'S24_4620',32,'66.29',1), - -(10299,'S32_2206',24,'36.21',4), - -(10299,'S32_4485',38,'84.70',7), - -(10299,'S50_4713',44,'77.29',5), - -(10300,'S12_1099',33,'184.84',5), - -(10300,'S12_3380',29,'116.27',3), - -(10300,'S12_3990',22,'76.61',6), - -(10300,'S12_4675',23,'95.58',2), - -(10300,'S18_1889',41,'63.14',1), - -(10300,'S18_3278',49,'65.94',8), - -(10300,'S18_3482',23,'144.05',7), - -(10300,'S24_3371',31,'52.05',4), - -(10301,'S18_1129',37,'114.65',8), - -(10301,'S18_1589',32,'118.22',4), - 
-(10301,'S18_1984',47,'119.49',7), - -(10301,'S18_2870',22,'113.52',5), - -(10301,'S18_3232',23,'135.47',9), - -(10301,'S18_3685',39,'137.04',6), - -(10301,'S24_1046',27,'64.67',1), - -(10301,'S24_1628',22,'40.75',3), - -(10301,'S24_2972',48,'32.10',10), - -(10301,'S24_3432',22,'86.73',2), - -(10301,'S24_3856',50,'122.17',11), - -(10302,'S18_1749',43,'166.60',1), - -(10302,'S18_4409',38,'82.83',2), - -(10302,'S18_4933',23,'70.56',3), - -(10302,'S24_2766',49,'75.42',5), - -(10302,'S24_2887',45,'104.52',4), - -(10302,'S24_3191',48,'74.48',6), - -(10303,'S18_2248',46,'56.91',2), - -(10303,'S24_3969',24,'35.70',1), - -(10304,'S10_1949',47,'201.44',6), - -(10304,'S12_1666',39,'117.54',3), - -(10304,'S18_1097',46,'106.17',5), - -(10304,'S18_1342',37,'95.55',13), - -(10304,'S18_1367',37,'46.90',12), - -(10304,'S18_2325',24,'102.98',17), - -(10304,'S18_2795',20,'141.75',14), - -(10304,'S18_2949',46,'98.27',7), - -(10304,'S18_2957',24,'54.34',9), - -(10304,'S18_3136',26,'90.06',8), - -(10304,'S18_3320',38,'95.24',11), - -(10304,'S18_4668',34,'44.27',4), - -(10304,'S24_1937',23,'29.21',16), - -(10304,'S24_2022',44,'42.11',15), - -(10304,'S24_4258',33,'80.83',10), - -(10304,'S32_3522',36,'52.36',2), - -(10304,'S700_2824',40,'80.92',1), - -(10305,'S10_4962',38,'130.01',13), - -(10305,'S12_4473',38,'107.84',5), - -(10305,'S18_2238',27,'132.62',4), - -(10305,'S18_2319',36,'117.82',8), - -(10305,'S18_2432',41,'58.95',11), - -(10305,'S18_3232',37,'160.87',9), - -(10305,'S18_4600',22,'112.60',14), - -(10305,'S24_1444',45,'48.55',2), - -(10305,'S24_2300',24,'107.34',10), - -(10305,'S24_2840',48,'30.76',6), - -(10305,'S24_4048',36,'118.28',1), - -(10305,'S32_1268',28,'94.38',12), - -(10305,'S32_2509',40,'48.70',7), - -(10305,'S50_1392',42,'109.96',3), - -(10306,'S12_1108',31,'182.86',13), - -(10306,'S12_3148',34,'145.04',14), - -(10306,'S12_3891',20,'145.34',12), - -(10306,'S18_3140',32,'114.74',9), - -(10306,'S18_3259',40,'83.70',11), - -(10306,'S18_4027',23,'126.39',16), - 
-(10306,'S18_4522',39,'85.14',8), - -(10306,'S24_2011',29,'109.37',7), - -(10306,'S24_3151',31,'76.12',2), - -(10306,'S32_3207',46,'60.28',17), - -(10306,'S50_1514',34,'51.55',15), - -(10306,'S700_1138',50,'61.34',3), - -(10306,'S700_1938',38,'73.62',10), - -(10306,'S700_2610',43,'62.16',1), - -(10306,'S700_3505',32,'99.17',4), - -(10306,'S700_3962',30,'87.39',5), - -(10306,'S72_3212',35,'48.05',6), - -(10307,'S10_4757',22,'118.32',9), - -(10307,'S18_1662',39,'135.61',1), - -(10307,'S18_3029',31,'71.40',7), - -(10307,'S18_3856',48,'92.11',6), - -(10307,'S24_2841',25,'58.23',2), - -(10307,'S24_3420',22,'64.44',3), - -(10307,'S24_3816',22,'75.47',8), - -(10307,'S700_2047',34,'81.47',5), - -(10307,'S72_1253',34,'44.20',4), - -(10308,'S10_2016',34,'115.37',2), - -(10308,'S10_4698',20,'187.85',1), - -(10308,'S18_2581',27,'81.95',7), - -(10308,'S18_2625',34,'48.46',3), - -(10308,'S24_1785',31,'99.57',9), - -(10308,'S24_2000',47,'68.55',4), - -(10308,'S24_3949',43,'58.00',16), - -(10308,'S24_4278',44,'71.73',8), - -(10308,'S32_1374',24,'99.89',5), - -(10308,'S32_4289',46,'61.22',10), - -(10308,'S50_1341',47,'37.09',11), - -(10308,'S700_1691',21,'73.07',12), - -(10308,'S700_2466',35,'88.75',14), - -(10308,'S700_2834',31,'100.85',6), - -(10308,'S700_3167',21,'79.20',13), - -(10308,'S700_4002',39,'62.93',15), - -(10309,'S10_1678',41,'94.74',5), - -(10309,'S12_2823',26,'144.60',4), - -(10309,'S24_1578',21,'96.92',6), - -(10309,'S24_2360',24,'59.56',2), - -(10309,'S32_4485',50,'93.89',3), - -(10309,'S50_4713',28,'74.04',1), - -(10310,'S12_1099',33,'165.38',10), - -(10310,'S12_3380',24,'105.70',8), - -(10310,'S12_3990',49,'77.41',11), - -(10310,'S12_4675',25,'101.34',7), - -(10310,'S18_1129',37,'128.80',2), - -(10310,'S18_1889',20,'66.99',6), - -(10310,'S18_1984',24,'129.45',1), - -(10310,'S18_3232',48,'159.18',3), - -(10310,'S18_3278',27,'70.76',13), - -(10310,'S18_3482',49,'122.00',12), - -(10310,'S18_3782',42,'59.06',16), - -(10310,'S18_4721',40,'133.92',15), - 
-(10310,'S24_2972',33,'33.23',4), - -(10310,'S24_3371',38,'50.21',9), - -(10310,'S24_3856',45,'139.03',5), - -(10310,'S24_4620',49,'75.18',14), - -(10310,'S32_2206',36,'38.62',17), - -(10311,'S18_1589',29,'124.44',9), - -(10311,'S18_2870',43,'114.84',10), - -(10311,'S18_3685',32,'134.22',11), - -(10311,'S18_4409',41,'92.03',1), - -(10311,'S18_4933',25,'66.99',2), - -(10311,'S24_1046',26,'70.55',6), - -(10311,'S24_1628',45,'48.80',8), - -(10311,'S24_2766',28,'89.05',4), - -(10311,'S24_2887',43,'116.27',3), - -(10311,'S24_3191',25,'85.61',5), - -(10311,'S24_3432',46,'91.02',7), - -(10312,'S10_1949',48,'214.30',3), - -(10312,'S18_1097',32,'101.50',2), - -(10312,'S18_1342',43,'102.74',10), - -(10312,'S18_1367',25,'43.67',9), - -(10312,'S18_1749',48,'146.20',17), - -(10312,'S18_2248',30,'48.43',16), - -(10312,'S18_2325',31,'111.87',14), - -(10312,'S18_2795',25,'150.19',11), - -(10312,'S18_2949',37,'91.18',4), - -(10312,'S18_2957',35,'54.34',6), - -(10312,'S18_3136',38,'93.20',5), - -(10312,'S18_3320',33,'84.33',8), - -(10312,'S18_4668',39,'44.27',1), - -(10312,'S24_1937',39,'27.88',13), - -(10312,'S24_2022',23,'43.46',12), - -(10312,'S24_3969',31,'40.21',15), - -(10312,'S24_4258',44,'96.42',7), - -(10313,'S10_4962',40,'141.83',7), - -(10313,'S12_1666',21,'131.20',11), - -(10313,'S18_2319',29,'109.23',2), - -(10313,'S18_2432',34,'52.87',5), - -(10313,'S18_3232',25,'143.94',3), - -(10313,'S18_4600',28,'110.18',8), - -(10313,'S24_2300',42,'102.23',4), - -(10313,'S32_1268',27,'96.31',6), - -(10313,'S32_2509',38,'48.70',1), - -(10313,'S32_3522',34,'55.59',10), - -(10313,'S700_2824',30,'96.09',9), - -(10314,'S12_1108',38,'176.63',5), - -(10314,'S12_3148',46,'125.40',6), - -(10314,'S12_3891',36,'169.56',4), - -(10314,'S12_4473',45,'95.99',14), - -(10314,'S18_2238',42,'135.90',13), - -(10314,'S18_3140',20,'129.76',1), - -(10314,'S18_3259',23,'84.71',3), - -(10314,'S18_4027',29,'129.26',8), - -(10314,'S24_1444',44,'51.44',11), - -(10314,'S24_2840',39,'31.82',15), - 
-(10314,'S24_4048',38,'111.18',10), - -(10314,'S32_3207',35,'58.41',9), - -(10314,'S50_1392',28,'115.75',12), - -(10314,'S50_1514',38,'50.38',7), - -(10314,'S700_1938',23,'83.15',2), - -(10315,'S18_4522',36,'78.12',7), - -(10315,'S24_2011',35,'111.83',6), - -(10315,'S24_3151',24,'78.77',1), - -(10315,'S700_1138',41,'60.67',2), - -(10315,'S700_3505',31,'99.17',3), - -(10315,'S700_3962',37,'88.39',4), - -(10315,'S72_3212',40,'51.32',5), - -(10316,'S10_4757',33,'126.48',17), - -(10316,'S18_1662',27,'140.34',9), - -(10316,'S18_3029',21,'72.26',15), - -(10316,'S18_3856',47,'89.99',14), - -(10316,'S24_1785',25,'93.01',1), - -(10316,'S24_2841',34,'67.14',10), - -(10316,'S24_3420',47,'55.23',11), - -(10316,'S24_3816',25,'77.15',16), - -(10316,'S24_3949',30,'67.56',8), - -(10316,'S32_4289',24,'59.16',2), - -(10316,'S50_1341',34,'36.66',3), - -(10316,'S700_1691',34,'74.90',4), - -(10316,'S700_2047',45,'73.32',13), - -(10316,'S700_2466',23,'85.76',6), - -(10316,'S700_2610',48,'67.22',18), - -(10316,'S700_3167',48,'77.60',5), - -(10316,'S700_4002',44,'68.11',7), - -(10316,'S72_1253',34,'43.70',12), - -(10317,'S24_4278',35,'69.55',1), - -(10318,'S10_1678',46,'84.22',1), - -(10318,'S10_2016',45,'102.29',4), - -(10318,'S10_4698',37,'189.79',3), - -(10318,'S18_2581',31,'81.95',9), - -(10318,'S18_2625',42,'49.67',5), - -(10318,'S24_1578',48,'93.54',2), - -(10318,'S24_2000',26,'60.94',6), - -(10318,'S32_1374',47,'81.91',7), - -(10318,'S700_2834',50,'102.04',8), - -(10319,'S12_2823',30,'134.05',9), - -(10319,'S18_3278',46,'77.19',1), - -(10319,'S18_3782',44,'54.71',4), - -(10319,'S18_4721',45,'120.53',3), - -(10319,'S24_2360',31,'65.80',7), - -(10319,'S24_4620',43,'78.41',2), - -(10319,'S32_2206',29,'35.00',5), - -(10319,'S32_4485',22,'96.95',8), - -(10319,'S50_4713',45,'79.73',6), - -(10320,'S12_1099',31,'184.84',3), - -(10320,'S12_3380',35,'102.17',1), - -(10320,'S12_3990',38,'63.84',4), - -(10320,'S18_3482',25,'139.64',5), - -(10320,'S24_3371',26,'60.62',2), - 
-(10321,'S12_4675',24,'105.95',15), - -(10321,'S18_1129',41,'123.14',10), - -(10321,'S18_1589',44,'120.71',6), - -(10321,'S18_1889',37,'73.92',14), - -(10321,'S18_1984',25,'142.25',9), - -(10321,'S18_2870',27,'126.72',7), - -(10321,'S18_3232',33,'164.26',11), - -(10321,'S18_3685',28,'138.45',8), - -(10321,'S24_1046',30,'68.35',3), - -(10321,'S24_1628',48,'42.76',5), - -(10321,'S24_2766',30,'74.51',1), - -(10321,'S24_2972',37,'31.72',12), - -(10321,'S24_3191',39,'81.33',2), - -(10321,'S24_3432',21,'103.87',4), - -(10321,'S24_3856',26,'137.62',13), - -(10322,'S10_1949',40,'180.01',1), - -(10322,'S10_4962',46,'141.83',8), - -(10322,'S12_1666',27,'136.67',9), - -(10322,'S18_1097',22,'101.50',10), - -(10322,'S18_1342',43,'92.47',14), - -(10322,'S18_1367',41,'44.21',5), - -(10322,'S18_2325',50,'120.77',6), - -(10322,'S18_2432',35,'57.12',11), - -(10322,'S18_2795',36,'158.63',2), - -(10322,'S18_2949',33,'100.30',12), - -(10322,'S18_2957',41,'54.34',13), - -(10322,'S18_3136',48,'90.06',7), - -(10322,'S24_1937',20,'26.55',3), - -(10322,'S24_2022',30,'40.77',4), - -(10323,'S18_3320',33,'88.30',2), - -(10323,'S18_4600',47,'96.86',1), - -(10324,'S12_3148',27,'148.06',1), - -(10324,'S12_4473',26,'100.73',7), - -(10324,'S18_2238',47,'142.45',8), - -(10324,'S18_2319',33,'105.55',10), - -(10324,'S18_3232',27,'137.17',12), - -(10324,'S18_4027',49,'120.64',13), - -(10324,'S18_4668',38,'49.81',6), - -(10324,'S24_1444',25,'49.71',14), - -(10324,'S24_2300',31,'107.34',2), - -(10324,'S24_2840',30,'29.35',9), - -(10324,'S24_4258',33,'95.44',3), - -(10324,'S32_1268',20,'91.49',11), - -(10324,'S32_3522',48,'60.76',4), - -(10324,'S700_2824',34,'80.92',5), - -(10325,'S10_4757',47,'111.52',6), - -(10325,'S12_1108',42,'193.25',8), - -(10325,'S12_3891',24,'166.10',1), - -(10325,'S18_3140',24,'114.74',9), - -(10325,'S24_4048',44,'114.73',5), - -(10325,'S32_2509',38,'44.37',3), - -(10325,'S32_3207',28,'55.30',2), - -(10325,'S50_1392',38,'99.55',4), - -(10325,'S50_1514',44,'56.24',7), - 
-(10326,'S18_3259',32,'94.79',6), - -(10326,'S18_4522',50,'73.73',5), - -(10326,'S24_2011',41,'120.43',4), - -(10326,'S24_3151',41,'86.74',3), - -(10326,'S24_3816',20,'81.34',2), - -(10326,'S700_1138',39,'60.67',1), - -(10327,'S18_1662',25,'154.54',6), - -(10327,'S18_2581',45,'74.34',8), - -(10327,'S18_3029',25,'74.84',5), - -(10327,'S700_1938',20,'79.68',7), - -(10327,'S700_2610',21,'65.05',1), - -(10327,'S700_3505',43,'85.14',2), - -(10327,'S700_3962',37,'83.42',3), - -(10327,'S72_3212',37,'48.05',4), - -(10328,'S18_3856',34,'104.81',6), - -(10328,'S24_1785',47,'87.54',14), - -(10328,'S24_2841',48,'67.82',1), - -(10328,'S24_3420',20,'56.55',2), - -(10328,'S24_3949',35,'55.96',3), - -(10328,'S24_4278',43,'69.55',4), - -(10328,'S32_4289',24,'57.10',5), - -(10328,'S50_1341',34,'42.33',7), - -(10328,'S700_1691',27,'84.03',8), - -(10328,'S700_2047',41,'75.13',9), - -(10328,'S700_2466',37,'95.73',10), - -(10328,'S700_2834',33,'117.46',11), - -(10328,'S700_3167',33,'71.20',13), - -(10328,'S700_4002',39,'69.59',12), - -(10329,'S10_1678',42,'80.39',1), - -(10329,'S10_2016',20,'109.42',2), - -(10329,'S10_4698',26,'164.61',3), - -(10329,'S12_1099',41,'182.90',5), - -(10329,'S12_2823',24,'128.03',6), - -(10329,'S12_3380',46,'117.44',13), - -(10329,'S12_3990',33,'74.21',14), - -(10329,'S12_4675',39,'102.49',15), - -(10329,'S18_1889',29,'66.22',9), - -(10329,'S18_2625',38,'55.72',12), - -(10329,'S18_3278',38,'65.13',10), - -(10329,'S24_1578',30,'104.81',7), - -(10329,'S24_2000',37,'71.60',4), - -(10329,'S32_1374',45,'80.91',11), - -(10329,'S72_1253',44,'41.22',8), - -(10330,'S18_3482',37,'136.70',3), - -(10330,'S18_3782',29,'59.06',2), - -(10330,'S18_4721',50,'133.92',4), - -(10330,'S24_2360',42,'56.10',1), - -(10331,'S18_1129',46,'120.31',6), - -(10331,'S18_1589',44,'99.55',14), - -(10331,'S18_1749',44,'154.70',7), - -(10331,'S18_1984',30,'135.14',8), - -(10331,'S18_2870',26,'130.68',10), - -(10331,'S18_3232',27,'169.34',11), - -(10331,'S18_3685',26,'132.80',12), - 
-(10331,'S24_2972',27,'37.00',13), - -(10331,'S24_3371',25,'55.11',9), - -(10331,'S24_3856',21,'139.03',1), - -(10331,'S24_4620',41,'70.33',2), - -(10331,'S32_2206',28,'33.39',3), - -(10331,'S32_4485',32,'100.01',4), - -(10331,'S50_4713',20,'74.04',5), - -(10332,'S18_1342',46,'89.38',15), - -(10332,'S18_1367',27,'51.21',16), - -(10332,'S18_2248',38,'53.88',9), - -(10332,'S18_2325',35,'116.96',8), - -(10332,'S18_2795',24,'138.38',1), - -(10332,'S18_2957',26,'53.09',17), - -(10332,'S18_3136',40,'100.53',18), - -(10332,'S18_4409',50,'92.03',2), - -(10332,'S18_4933',21,'70.56',3), - -(10332,'S24_1046',23,'61.73',4), - -(10332,'S24_1628',20,'47.29',5), - -(10332,'S24_1937',45,'29.87',6), - -(10332,'S24_2022',26,'43.01',10), - -(10332,'S24_2766',39,'84.51',7), - -(10332,'S24_2887',44,'108.04',11), - -(10332,'S24_3191',45,'77.91',12), - -(10332,'S24_3432',31,'94.23',13), - -(10332,'S24_3969',41,'34.47',14), - -(10333,'S10_1949',26,'188.58',3), - -(10333,'S12_1666',33,'121.64',6), - -(10333,'S18_1097',29,'110.84',7), - -(10333,'S18_2949',31,'95.23',5), - -(10333,'S18_3320',46,'95.24',2), - -(10333,'S18_4668',24,'42.26',8), - -(10333,'S24_4258',39,'95.44',1), - -(10333,'S32_3522',33,'62.05',4), - -(10334,'S10_4962',26,'130.01',2), - -(10334,'S18_2319',46,'108.00',6), - -(10334,'S18_2432',34,'52.87',1), - -(10334,'S18_3232',20,'147.33',3), - -(10334,'S18_4600',49,'101.71',4), - -(10334,'S24_2300',42,'117.57',5), - -(10335,'S24_2840',33,'32.88',2), - -(10335,'S32_1268',44,'77.05',1), - -(10335,'S32_2509',40,'49.78',3), - -(10336,'S12_1108',33,'176.63',10), - -(10336,'S12_3148',33,'126.91',11), - -(10336,'S12_3891',49,'141.88',1), - -(10336,'S12_4473',38,'95.99',3), - -(10336,'S18_2238',49,'153.91',6), - -(10336,'S18_3140',48,'135.22',12), - -(10336,'S18_3259',21,'100.84',7), - -(10336,'S24_1444',45,'49.71',4), - -(10336,'S24_4048',31,'113.55',5), - -(10336,'S32_3207',31,'59.03',9), - -(10336,'S50_1392',23,'109.96',8), - -(10336,'S700_2824',46,'94.07',2), - 
-(10337,'S10_4757',25,'131.92',8), - -(10337,'S18_4027',36,'140.75',3), - -(10337,'S18_4522',29,'76.36',2), - -(10337,'S24_2011',29,'119.20',4), - -(10337,'S50_1514',21,'54.48',6), - -(10337,'S700_1938',36,'73.62',9), - -(10337,'S700_3505',31,'84.14',1), - -(10337,'S700_3962',36,'83.42',7), - -(10337,'S72_3212',42,'49.14',5), - -(10338,'S18_1662',41,'137.19',1), - -(10338,'S18_3029',28,'80.86',3), - -(10338,'S18_3856',45,'93.17',2), - -(10339,'S10_2016',40,'117.75',4), - -(10339,'S10_4698',39,'178.17',3), - -(10339,'S18_2581',27,'79.41',2), - -(10339,'S18_2625',30,'48.46',1), - -(10339,'S24_1578',27,'96.92',10), - -(10339,'S24_1785',21,'106.14',7), - -(10339,'S24_2841',55,'67.82',12), - -(10339,'S24_3151',55,'73.46',13), - -(10339,'S24_3420',29,'57.86',14), - -(10339,'S24_3816',42,'72.96',16), - -(10339,'S24_3949',45,'57.32',11), - -(10339,'S700_1138',22,'53.34',5), - -(10339,'S700_2047',55,'86.90',15), - -(10339,'S700_2610',50,'62.16',9), - -(10339,'S700_4002',50,'66.63',8), - -(10339,'S72_1253',27,'49.66',6), - -(10340,'S24_2000',55,'62.46',8), - -(10340,'S24_4278',40,'63.76',1), - -(10340,'S32_1374',55,'95.89',2), - -(10340,'S32_4289',39,'67.41',3), - -(10340,'S50_1341',40,'37.09',4), - -(10340,'S700_1691',30,'73.99',5), - -(10340,'S700_2466',55,'81.77',7), - -(10340,'S700_2834',29,'98.48',6), - -(10341,'S10_1678',41,'84.22',9), - -(10341,'S12_1099',45,'192.62',2), - -(10341,'S12_2823',55,'120.50',8), - -(10341,'S12_3380',44,'111.57',1), - -(10341,'S12_3990',36,'77.41',10), - -(10341,'S12_4675',55,'109.40',7), - -(10341,'S24_2360',32,'63.03',6), - -(10341,'S32_4485',31,'95.93',4), - -(10341,'S50_4713',38,'78.11',3), - -(10341,'S700_3167',34,'70.40',5), - -(10342,'S18_1129',40,'118.89',2), - -(10342,'S18_1889',55,'63.14',1), - -(10342,'S18_1984',22,'115.22',3), - -(10342,'S18_3232',30,'167.65',4), - -(10342,'S18_3278',25,'76.39',5), - -(10342,'S18_3482',55,'136.70',7), - -(10342,'S18_3782',26,'57.82',8), - -(10342,'S18_4721',38,'124.99',11), - 
-(10342,'S24_2972',39,'30.59',9), - -(10342,'S24_3371',48,'60.01',10), - -(10342,'S24_3856',42,'112.34',6), - -(10343,'S18_1589',36,'109.51',4), - -(10343,'S18_2870',25,'118.80',3), - -(10343,'S18_3685',44,'127.15',2), - -(10343,'S24_1628',27,'44.78',6), - -(10343,'S24_4620',30,'76.80',1), - -(10343,'S32_2206',29,'37.41',5), - -(10344,'S18_1749',45,'168.30',1), - -(10344,'S18_2248',40,'49.04',2), - -(10344,'S18_2325',30,'118.23',3), - -(10344,'S18_4409',21,'80.99',4), - -(10344,'S18_4933',26,'68.42',5), - -(10344,'S24_1046',29,'61.00',7), - -(10344,'S24_1937',20,'27.88',6), - -(10345,'S24_2022',43,'38.98',1), - -(10346,'S18_1342',42,'88.36',3), - -(10346,'S24_2766',25,'87.24',1), - -(10346,'S24_2887',24,'117.44',5), - -(10346,'S24_3191',24,'80.47',2), - -(10346,'S24_3432',26,'103.87',6), - -(10346,'S24_3969',22,'38.57',4), - -(10347,'S10_1949',30,'188.58',1), - -(10347,'S10_4962',27,'132.97',2), - -(10347,'S12_1666',29,'132.57',3), - -(10347,'S18_1097',42,'113.17',5), - -(10347,'S18_1367',21,'46.36',7), - -(10347,'S18_2432',50,'51.05',8), - -(10347,'S18_2795',21,'136.69',6), - -(10347,'S18_2949',48,'84.09',9), - -(10347,'S18_2957',34,'60.59',10), - -(10347,'S18_3136',45,'95.30',11), - -(10347,'S18_3320',26,'84.33',12), - -(10347,'S18_4600',45,'115.03',4), - -(10348,'S12_1108',48,'207.80',8), - -(10348,'S12_3148',47,'122.37',4), - -(10348,'S18_4668',29,'43.77',6), - -(10348,'S24_2300',37,'107.34',1), - -(10348,'S24_4258',39,'82.78',2), - -(10348,'S32_1268',42,'90.53',3), - -(10348,'S32_3522',31,'62.70',5), - -(10348,'S700_2824',32,'100.14',7), - -(10349,'S12_3891',26,'166.10',10), - -(10349,'S12_4473',48,'114.95',9), - -(10349,'S18_2238',38,'142.45',8), - -(10349,'S18_2319',38,'117.82',7), - -(10349,'S18_3232',48,'164.26',6), - -(10349,'S18_4027',34,'140.75',5), - -(10349,'S24_1444',48,'50.29',4), - -(10349,'S24_2840',36,'31.47',3), - -(10349,'S24_4048',23,'111.18',2), - -(10349,'S32_2509',33,'44.37',1), - -(10350,'S10_4757',26,'110.16',5), - 
-(10350,'S18_3029',43,'84.30',6), - -(10350,'S18_3140',44,'135.22',1), - -(10350,'S18_3259',41,'94.79',2), - -(10350,'S18_4522',30,'70.22',3), - -(10350,'S24_2011',34,'98.31',7), - -(10350,'S24_3151',30,'86.74',9), - -(10350,'S24_3816',25,'77.15',10), - -(10350,'S32_3207',27,'61.52',14), - -(10350,'S50_1392',31,'104.18',8), - -(10350,'S50_1514',44,'56.82',17), - -(10350,'S700_1138',46,'56.00',11), - -(10350,'S700_1938',28,'76.22',4), - -(10350,'S700_2610',29,'68.67',12), - -(10350,'S700_3505',31,'87.15',13), - -(10350,'S700_3962',25,'97.32',16), - -(10350,'S72_3212',20,'48.05',15), - -(10351,'S18_1662',39,'143.50',1), - -(10351,'S18_3856',20,'104.81',2), - -(10351,'S24_2841',25,'64.40',5), - -(10351,'S24_3420',38,'53.92',4), - -(10351,'S24_3949',34,'68.24',3), - -(10352,'S700_2047',23,'75.13',3), - -(10352,'S700_2466',49,'87.75',2), - -(10352,'S700_4002',22,'62.19',1), - -(10352,'S72_1253',49,'46.18',4), - -(10353,'S18_2581',27,'71.81',1), - -(10353,'S24_1785',28,'107.23',2), - -(10353,'S24_4278',35,'69.55',3), - -(10353,'S32_1374',46,'86.90',5), - -(10353,'S32_4289',40,'68.10',7), - -(10353,'S50_1341',40,'35.78',8), - -(10353,'S700_1691',39,'73.07',9), - -(10353,'S700_2834',48,'98.48',4), - -(10353,'S700_3167',43,'74.40',6), - -(10354,'S10_1678',42,'84.22',6), - -(10354,'S10_2016',20,'95.15',2), - -(10354,'S10_4698',42,'178.17',3), - -(10354,'S12_1099',31,'157.60',9), - -(10354,'S12_2823',35,'141.58',4), - -(10354,'S12_3380',29,'98.65',11), - -(10354,'S12_3990',23,'76.61',12), - -(10354,'S12_4675',28,'100.19',13), - -(10354,'S18_1889',21,'76.23',8), - -(10354,'S18_2625',28,'49.06',10), - -(10354,'S18_3278',36,'69.15',7), - -(10354,'S24_1578',21,'96.92',5), - -(10354,'S24_2000',28,'62.46',1), - -(10355,'S18_3482',23,'117.59',7), - -(10355,'S18_3782',31,'60.30',1), - -(10355,'S18_4721',25,'124.99',2), - -(10355,'S24_2360',41,'56.10',3), - -(10355,'S24_2972',36,'37.38',4), - -(10355,'S24_3371',44,'60.62',6), - -(10355,'S24_3856',32,'137.62',8), - 
-(10355,'S24_4620',28,'75.18',9), - -(10355,'S32_2206',38,'32.99',10), - -(10355,'S32_4485',40,'93.89',5), - -(10356,'S18_1129',43,'120.31',8), - -(10356,'S18_1342',50,'82.19',9), - -(10356,'S18_1367',22,'44.75',6), - -(10356,'S18_1984',27,'130.87',2), - -(10356,'S18_2325',29,'106.79',3), - -(10356,'S18_2795',30,'158.63',1), - -(10356,'S24_1937',48,'31.86',5), - -(10356,'S24_2022',26,'42.11',7), - -(10356,'S50_4713',26,'78.11',4), - -(10357,'S10_1949',32,'199.30',10), - -(10357,'S10_4962',43,'135.92',9), - -(10357,'S12_1666',49,'109.34',8), - -(10357,'S18_1097',39,'112.00',1), - -(10357,'S18_2432',41,'58.95',7), - -(10357,'S18_2949',41,'91.18',6), - -(10357,'S18_2957',49,'59.34',5), - -(10357,'S18_3136',44,'104.72',4), - -(10357,'S18_3320',25,'84.33',3), - -(10357,'S18_4600',28,'105.34',2), - -(10358,'S12_3148',49,'129.93',5), - -(10358,'S12_4473',42,'98.36',9), - -(10358,'S18_2238',20,'142.45',10), - -(10358,'S18_2319',20,'99.41',11), - -(10358,'S18_3232',32,'137.17',12), - -(10358,'S18_4027',25,'117.77',13), - -(10358,'S18_4668',30,'46.29',8), - -(10358,'S24_1444',44,'56.07',14), - -(10358,'S24_2300',41,'127.79',7), - -(10358,'S24_2840',36,'33.59',4), - -(10358,'S24_4258',41,'88.62',6), - -(10358,'S32_1268',41,'82.83',1), - -(10358,'S32_3522',36,'51.71',2), - -(10358,'S700_2824',27,'85.98',3), - -(10359,'S10_4757',48,'122.40',6), - -(10359,'S12_1108',42,'180.79',8), - -(10359,'S12_3891',49,'162.64',5), - -(10359,'S24_4048',22,'108.82',7), - -(10359,'S32_2509',36,'45.45',3), - -(10359,'S32_3207',22,'62.14',1), - -(10359,'S50_1392',46,'99.55',2), - -(10359,'S50_1514',25,'47.45',4), - -(10360,'S18_1662',50,'126.15',12), - -(10360,'S18_2581',41,'68.43',13), - -(10360,'S18_3029',46,'71.40',14), - -(10360,'S18_3140',29,'122.93',8), - -(10360,'S18_3259',29,'94.79',18), - -(10360,'S18_3856',40,'101.64',15), - -(10360,'S18_4522',40,'76.36',1), - -(10360,'S24_1785',22,'106.14',17), - -(10360,'S24_2011',31,'100.77',2), - -(10360,'S24_2841',49,'55.49',16), - 
-(10360,'S24_3151',36,'70.81',3), - -(10360,'S24_3816',22,'78.83',4), - -(10360,'S700_1138',32,'64.67',5), - -(10360,'S700_1938',26,'86.61',6), - -(10360,'S700_2610',30,'70.11',7), - -(10360,'S700_3505',35,'83.14',9), - -(10360,'S700_3962',31,'92.36',10), - -(10360,'S72_3212',31,'54.05',11), - -(10361,'S10_1678',20,'92.83',13), - -(10361,'S10_2016',26,'114.18',8), - -(10361,'S24_3420',34,'62.46',6), - -(10361,'S24_3949',26,'61.42',7), - -(10361,'S24_4278',25,'68.83',1), - -(10361,'S32_4289',49,'56.41',2), - -(10361,'S50_1341',33,'35.78',3), - -(10361,'S700_1691',20,'88.60',4), - -(10361,'S700_2047',24,'85.99',14), - -(10361,'S700_2466',26,'91.74',9), - -(10361,'S700_2834',44,'107.97',5), - -(10361,'S700_3167',44,'76.80',10), - -(10361,'S700_4002',35,'62.19',11), - -(10361,'S72_1253',23,'47.67',12), - -(10362,'S10_4698',22,'182.04',4), - -(10362,'S12_2823',22,'131.04',1), - -(10362,'S18_2625',23,'53.91',3), - -(10362,'S24_1578',50,'91.29',2), - -(10363,'S12_1099',33,'180.95',3), - -(10363,'S12_3380',34,'106.87',4), - -(10363,'S12_3990',34,'68.63',5), - -(10363,'S12_4675',46,'103.64',6), - -(10363,'S18_1889',22,'61.60',7), - -(10363,'S18_3278',46,'69.15',10), - -(10363,'S18_3482',24,'124.94',11), - -(10363,'S18_3782',32,'52.22',12), - -(10363,'S18_4721',28,'123.50',13), - -(10363,'S24_2000',21,'70.08',8), - -(10363,'S24_2360',43,'56.10',14), - -(10363,'S24_3371',21,'52.05',15), - -(10363,'S24_3856',31,'113.75',1), - -(10363,'S24_4620',43,'75.99',9), - -(10363,'S32_1374',50,'92.90',2), - -(10364,'S32_2206',48,'38.22',1), - -(10365,'S18_1129',30,'116.06',1), - -(10365,'S32_4485',22,'82.66',3), - -(10365,'S50_4713',44,'68.34',2), - -(10366,'S18_1984',34,'116.65',3), - -(10366,'S18_2870',49,'105.60',2), - -(10366,'S18_3232',34,'154.10',1), - -(10367,'S18_1589',49,'105.77',1), - -(10367,'S18_1749',37,'144.50',3), - -(10367,'S18_2248',45,'50.25',4), - -(10367,'S18_2325',27,'124.59',5), - -(10367,'S18_2795',32,'140.06',7), - -(10367,'S18_3685',46,'131.39',6), - 
-(10367,'S18_4409',43,'77.31',8), - -(10367,'S18_4933',44,'66.99',9), - -(10367,'S24_1046',21,'72.76',10), - -(10367,'S24_1628',38,'50.31',11), - -(10367,'S24_1937',23,'29.54',13), - -(10367,'S24_2022',28,'43.01',12), - -(10367,'S24_2972',36,'36.25',2), - -(10368,'S24_2766',40,'73.60',2), - -(10368,'S24_2887',31,'115.09',5), - -(10368,'S24_3191',46,'83.04',1), - -(10368,'S24_3432',20,'93.16',4), - -(10368,'S24_3969',46,'36.52',3), - -(10369,'S10_1949',41,'195.01',2), - -(10369,'S18_1342',44,'89.38',8), - -(10369,'S18_1367',32,'46.36',7), - -(10369,'S18_2949',42,'100.30',1), - -(10369,'S18_2957',28,'51.84',6), - -(10369,'S18_3136',21,'90.06',5), - -(10369,'S18_3320',45,'80.36',4), - -(10369,'S24_4258',40,'93.49',3), - -(10370,'S10_4962',35,'128.53',4), - -(10370,'S12_1666',49,'128.47',8), - -(10370,'S18_1097',27,'100.34',1), - -(10370,'S18_2319',22,'101.87',5), - -(10370,'S18_2432',22,'60.16',7), - -(10370,'S18_3232',27,'167.65',9), - -(10370,'S18_4600',29,'105.34',6), - -(10370,'S18_4668',20,'41.76',2), - -(10370,'S32_3522',25,'63.99',3), - -(10371,'S12_1108',32,'178.71',6), - -(10371,'S12_4473',49,'104.28',4), - -(10371,'S18_2238',25,'160.46',7), - -(10371,'S24_1444',25,'53.75',12), - -(10371,'S24_2300',20,'126.51',5), - -(10371,'S24_2840',45,'35.01',8), - -(10371,'S24_4048',28,'95.81',9), - -(10371,'S32_1268',26,'82.83',1), - -(10371,'S32_2509',20,'44.37',2), - -(10371,'S32_3207',30,'53.44',11), - -(10371,'S50_1392',48,'97.23',10), - -(10371,'S700_2824',34,'83.95',3), - -(10372,'S12_3148',40,'146.55',4), - -(10372,'S12_3891',34,'140.15',1), - -(10372,'S18_3140',28,'131.13',3), - -(10372,'S18_3259',25,'91.76',5), - -(10372,'S18_4027',48,'119.20',6), - -(10372,'S18_4522',41,'78.99',7), - -(10372,'S24_2011',37,'102.00',8), - -(10372,'S50_1514',24,'56.82',9), - -(10372,'S700_1938',44,'74.48',2), - -(10373,'S10_4757',39,'118.32',3), - -(10373,'S18_1662',28,'143.50',4), - -(10373,'S18_3029',22,'75.70',5), - -(10373,'S18_3856',50,'99.52',6), - 
-(10373,'S24_2841',38,'58.92',7), - -(10373,'S24_3151',33,'82.31',12), - -(10373,'S24_3420',46,'53.92',11), - -(10373,'S24_3816',23,'83.86',10), - -(10373,'S24_3949',39,'62.10',13), - -(10373,'S700_1138',44,'58.00',14), - -(10373,'S700_2047',32,'76.94',15), - -(10373,'S700_2610',41,'69.39',16), - -(10373,'S700_3505',34,'94.16',2), - -(10373,'S700_3962',37,'83.42',8), - -(10373,'S700_4002',45,'68.11',17), - -(10373,'S72_1253',25,'44.20',9), - -(10373,'S72_3212',29,'48.05',1), - -(10374,'S10_2016',39,'115.37',5), - -(10374,'S10_4698',22,'158.80',1), - -(10374,'S18_2581',42,'75.19',2), - -(10374,'S18_2625',22,'48.46',4), - -(10374,'S24_1578',38,'112.70',6), - -(10374,'S24_1785',46,'107.23',3), - -(10375,'S10_1678',21,'76.56',12), - -(10375,'S12_1099',45,'184.84',7), - -(10375,'S12_2823',49,'150.62',13), - -(10375,'S24_2000',23,'67.03',9), - -(10375,'S24_2360',20,'60.26',14), - -(10375,'S24_4278',43,'60.13',2), - -(10375,'S32_1374',37,'87.90',3), - -(10375,'S32_4289',44,'59.85',4), - -(10375,'S32_4485',41,'96.95',15), - -(10375,'S50_1341',49,'36.22',5), - -(10375,'S50_4713',49,'69.16',8), - -(10375,'S700_1691',37,'86.77',6), - -(10375,'S700_2466',33,'94.73',1), - -(10375,'S700_2834',25,'98.48',10), - -(10375,'S700_3167',44,'69.60',11), - -(10376,'S12_3380',35,'98.65',1), - -(10377,'S12_3990',24,'65.44',5), - -(10377,'S12_4675',50,'112.86',1), - -(10377,'S18_1129',35,'124.56',2), - -(10377,'S18_1889',31,'61.60',4), - -(10377,'S18_1984',36,'125.18',6), - -(10377,'S18_3232',39,'143.94',3), - -(10378,'S18_1589',34,'121.95',5), - -(10378,'S18_3278',22,'66.74',4), - -(10378,'S18_3482',43,'146.99',10), - -(10378,'S18_3782',28,'60.30',9), - -(10378,'S18_4721',49,'122.02',8), - -(10378,'S24_2972',41,'30.59',7), - -(10378,'S24_3371',46,'52.66',6), - -(10378,'S24_3856',33,'129.20',3), - -(10378,'S24_4620',41,'80.84',2), - -(10378,'S32_2206',40,'35.80',1), - -(10379,'S18_1749',39,'156.40',2), - -(10379,'S18_2248',27,'50.85',1), - -(10379,'S18_2870',29,'113.52',5), - 
-(10379,'S18_3685',32,'134.22',4), - -(10379,'S24_1628',32,'48.80',3), - -(10380,'S18_1342',27,'88.36',13), - -(10380,'S18_2325',40,'119.50',10), - -(10380,'S18_2795',21,'156.94',8), - -(10380,'S18_4409',32,'78.23',1), - -(10380,'S18_4933',24,'66.99',2), - -(10380,'S24_1046',34,'66.88',3), - -(10380,'S24_1937',32,'29.87',4), - -(10380,'S24_2022',27,'37.63',5), - -(10380,'S24_2766',36,'77.24',6), - -(10380,'S24_2887',44,'111.57',7), - -(10380,'S24_3191',44,'77.05',9), - -(10380,'S24_3432',34,'91.02',11), - -(10380,'S24_3969',43,'32.82',12), - -(10381,'S10_1949',36,'182.16',3), - -(10381,'S10_4962',37,'138.88',6), - -(10381,'S12_1666',20,'132.57',1), - -(10381,'S18_1097',48,'114.34',2), - -(10381,'S18_1367',25,'49.60',9), - -(10381,'S18_2432',35,'60.77',7), - -(10381,'S18_2949',41,'100.30',8), - -(10381,'S18_2957',40,'51.22',4), - -(10381,'S18_3136',35,'93.20',5), - -(10382,'S12_1108',34,'166.24',10), - -(10382,'S12_3148',37,'145.04',11), - -(10382,'S12_3891',34,'143.61',12), - -(10382,'S12_4473',32,'103.10',13), - -(10382,'S18_2238',25,'160.46',5), - -(10382,'S18_3320',50,'84.33',7), - -(10382,'S18_4600',39,'115.03',1), - -(10382,'S18_4668',39,'46.29',2), - -(10382,'S24_2300',20,'120.12',3), - -(10382,'S24_4258',33,'97.39',4), - -(10382,'S32_1268',26,'85.72',6), - -(10382,'S32_3522',48,'57.53',8), - -(10382,'S700_2824',34,'101.15',9), - -(10383,'S18_2319',27,'119.05',11), - -(10383,'S18_3140',24,'125.66',9), - -(10383,'S18_3232',47,'155.79',6), - -(10383,'S18_3259',26,'83.70',12), - -(10383,'S18_4027',38,'137.88',1), - -(10383,'S18_4522',28,'77.24',7), - -(10383,'S24_1444',22,'52.60',2), - -(10383,'S24_2840',40,'33.24',3), - -(10383,'S24_4048',21,'117.10',4), - -(10383,'S32_2509',32,'53.57',5), - -(10383,'S32_3207',44,'55.93',8), - -(10383,'S50_1392',29,'94.92',13), - -(10383,'S50_1514',38,'48.62',10), - -(10384,'S10_4757',34,'129.20',4), - -(10384,'S24_2011',28,'114.29',3), - -(10384,'S24_3151',43,'71.69',2), - -(10384,'S700_1938',49,'71.02',1), - 
-(10385,'S24_3816',37,'78.83',2), - -(10385,'S700_1138',25,'62.00',1), - -(10386,'S18_1662',25,'130.88',7), - -(10386,'S18_2581',21,'72.65',18), - -(10386,'S18_3029',37,'73.12',5), - -(10386,'S18_3856',22,'100.58',6), - -(10386,'S24_1785',33,'101.76',11), - -(10386,'S24_2841',39,'56.86',1), - -(10386,'S24_3420',35,'54.57',9), - -(10386,'S24_3949',41,'55.96',12), - -(10386,'S24_4278',50,'71.73',8), - -(10386,'S700_2047',29,'85.09',13), - -(10386,'S700_2466',37,'90.75',14), - -(10386,'S700_2610',37,'67.22',10), - -(10386,'S700_3167',32,'68.00',17), - -(10386,'S700_3505',45,'83.14',2), - -(10386,'S700_3962',30,'80.44',3), - -(10386,'S700_4002',44,'59.22',15), - -(10386,'S72_1253',50,'47.67',16), - -(10386,'S72_3212',43,'52.42',4), - -(10387,'S32_1374',44,'79.91',1), - -(10388,'S10_1678',42,'80.39',4), - -(10388,'S10_2016',50,'118.94',5), - -(10388,'S10_4698',21,'156.86',7), - -(10388,'S12_2823',44,'125.01',6), - -(10388,'S32_4289',35,'58.47',8), - -(10388,'S50_1341',27,'41.02',1), - -(10388,'S700_1691',46,'74.90',2), - -(10388,'S700_2834',50,'111.53',3), - -(10389,'S12_1099',26,'182.90',4), - -(10389,'S12_3380',25,'95.13',6), - -(10389,'S12_3990',36,'76.61',7), - -(10389,'S12_4675',47,'102.49',8), - -(10389,'S18_1889',49,'63.91',3), - -(10389,'S18_2625',39,'52.09',5), - -(10389,'S24_1578',45,'112.70',1), - -(10389,'S24_2000',49,'61.70',2), - -(10390,'S18_1129',36,'117.48',14), - -(10390,'S18_1984',34,'132.29',15), - -(10390,'S18_2325',31,'102.98',16), - -(10390,'S18_2795',26,'162.00',7), - -(10390,'S18_3278',40,'75.59',9), - -(10390,'S18_3482',50,'135.23',1), - -(10390,'S18_3782',36,'54.09',2), - -(10390,'S18_4721',49,'122.02',3), - -(10390,'S24_2360',35,'67.87',4), - -(10390,'S24_2972',37,'35.87',5), - -(10390,'S24_3371',46,'51.43',6), - -(10390,'S24_3856',45,'134.81',8), - -(10390,'S24_4620',30,'66.29',10), - -(10390,'S32_2206',41,'39.02',11), - -(10390,'S32_4485',45,'101.03',12), - -(10390,'S50_4713',22,'81.36',13), - -(10391,'S10_1949',24,'195.01',4), - 
-(10391,'S10_4962',37,'121.15',7), - -(10391,'S12_1666',39,'110.70',9), - -(10391,'S18_1097',29,'114.34',10), - -(10391,'S18_1342',35,'102.74',2), - -(10391,'S18_1367',42,'47.44',3), - -(10391,'S18_2432',44,'57.73',5), - -(10391,'S18_2949',32,'99.28',6), - -(10391,'S24_1937',33,'26.55',8), - -(10391,'S24_2022',24,'36.29',1), - -(10392,'S18_2957',37,'61.21',3), - -(10392,'S18_3136',29,'103.67',2), - -(10392,'S18_3320',36,'98.22',1), - -(10393,'S12_3148',35,'145.04',8), - -(10393,'S12_4473',32,'99.54',10), - -(10393,'S18_2238',20,'137.53',11), - -(10393,'S18_2319',38,'104.32',7), - -(10393,'S18_4600',30,'106.55',9), - -(10393,'S18_4668',44,'41.76',1), - -(10393,'S24_2300',33,'112.46',2), - -(10393,'S24_4258',33,'88.62',3), - -(10393,'S32_1268',38,'84.75',4), - -(10393,'S32_3522',31,'63.35',5), - -(10393,'S700_2824',21,'83.95',6), - -(10394,'S18_3232',22,'135.47',5), - -(10394,'S18_4027',37,'124.95',1), - -(10394,'S24_1444',31,'53.18',2), - -(10394,'S24_2840',46,'35.36',6), - -(10394,'S24_4048',37,'104.09',7), - -(10394,'S32_2509',36,'47.08',3), - -(10394,'S32_3207',30,'55.93',4), - -(10395,'S10_4757',32,'125.12',2), - -(10395,'S12_1108',33,'205.72',1), - -(10395,'S50_1392',46,'98.39',4), - -(10395,'S50_1514',45,'57.99',3), - -(10396,'S12_3891',33,'155.72',3), - -(10396,'S18_3140',33,'129.76',2), - -(10396,'S18_3259',24,'91.76',4), - -(10396,'S18_4522',45,'83.38',5), - -(10396,'S24_2011',49,'100.77',6), - -(10396,'S24_3151',27,'77.00',7), - -(10396,'S24_3816',37,'77.99',8), - -(10396,'S700_1138',39,'62.00',1), - -(10397,'S700_1938',32,'69.29',5), - -(10397,'S700_2610',22,'62.88',4), - -(10397,'S700_3505',48,'86.15',3), - -(10397,'S700_3962',36,'80.44',2), - -(10397,'S72_3212',34,'52.96',1), - -(10398,'S18_1662',33,'130.88',11), - -(10398,'S18_2581',34,'82.79',15), - -(10398,'S18_3029',28,'70.54',18), - -(10398,'S18_3856',45,'92.11',17), - -(10398,'S24_1785',43,'100.67',16), - -(10398,'S24_2841',28,'60.29',3), - -(10398,'S24_3420',34,'61.15',13), - 
-(10398,'S24_3949',41,'56.64',2), - -(10398,'S24_4278',45,'65.93',14), - -(10398,'S32_4289',22,'60.54',4), - -(10398,'S50_1341',49,'38.84',5), - -(10398,'S700_1691',47,'78.55',6), - -(10398,'S700_2047',36,'75.13',7), - -(10398,'S700_2466',22,'98.72',8), - -(10398,'S700_2834',23,'102.04',9), - -(10398,'S700_3167',29,'76.80',10), - -(10398,'S700_4002',36,'62.19',12), - -(10398,'S72_1253',34,'41.22',1), - -(10399,'S10_1678',40,'77.52',8), - -(10399,'S10_2016',51,'99.91',7), - -(10399,'S10_4698',22,'156.86',6), - -(10399,'S12_2823',29,'123.51',5), - -(10399,'S18_2625',30,'51.48',4), - -(10399,'S24_1578',57,'104.81',3), - -(10399,'S24_2000',58,'75.41',2), - -(10399,'S32_1374',32,'97.89',1), - -(10400,'S10_4757',64,'134.64',9), - -(10400,'S18_1662',34,'129.31',1), - -(10400,'S18_3029',30,'74.84',7), - -(10400,'S18_3856',58,'88.93',6), - -(10400,'S24_2841',24,'55.49',2), - -(10400,'S24_3420',38,'59.18',3), - -(10400,'S24_3816',42,'74.64',8), - -(10400,'S700_2047',46,'82.37',5), - -(10400,'S72_1253',20,'41.71',4), - -(10401,'S18_2581',42,'75.19',3), - -(10401,'S24_1785',38,'87.54',5), - -(10401,'S24_3949',64,'59.37',12), - -(10401,'S24_4278',52,'65.93',4), - -(10401,'S32_1374',49,'81.91',1), - -(10401,'S32_4289',62,'62.60',6), - -(10401,'S50_1341',56,'41.46',7), - -(10401,'S700_1691',11,'77.64',8), - -(10401,'S700_2466',85,'98.72',10), - -(10401,'S700_2834',21,'96.11',2), - -(10401,'S700_3167',77,'73.60',9), - -(10401,'S700_4002',40,'66.63',11), - -(10402,'S10_2016',45,'118.94',1), - -(10402,'S18_2625',55,'58.15',2), - -(10402,'S24_2000',59,'61.70',3), - -(10403,'S10_1678',24,'85.17',7), - -(10403,'S10_4698',66,'174.29',9), - -(10403,'S12_2823',66,'122.00',6), - -(10403,'S18_3782',36,'55.33',1), - -(10403,'S24_1578',46,'109.32',8), - -(10403,'S24_2360',27,'57.49',4), - -(10403,'S32_2206',30,'35.80',2), - -(10403,'S32_4485',45,'88.78',5), - -(10403,'S50_4713',31,'65.09',3), - -(10404,'S12_1099',64,'163.44',3), - -(10404,'S12_3380',43,'102.17',1), - 
-(10404,'S12_3990',77,'67.03',4), - -(10404,'S18_3278',90,'67.54',6), - -(10404,'S18_3482',28,'127.88',5), - -(10404,'S18_4721',48,'124.99',8), - -(10404,'S24_3371',49,'53.27',2), - -(10404,'S24_4620',48,'65.48',7), - -(10405,'S12_4675',97,'115.16',5), - -(10405,'S18_1889',61,'72.38',4), - -(10405,'S18_3232',55,'147.33',1), - -(10405,'S24_2972',47,'37.38',2), - -(10405,'S24_3856',76,'127.79',3), - -(10406,'S18_1129',61,'124.56',3), - -(10406,'S18_1984',48,'133.72',2), - -(10406,'S18_3685',65,'117.26',1), - -(10407,'S18_1589',59,'114.48',11), - -(10407,'S18_1749',76,'141.10',2), - -(10407,'S18_2248',42,'58.12',1), - -(10407,'S18_2870',41,'132.00',12), - -(10407,'S18_4409',6,'91.11',3), - -(10407,'S18_4933',66,'64.14',4), - -(10407,'S24_1046',26,'68.35',8), - -(10407,'S24_1628',64,'45.78',10), - -(10407,'S24_2766',76,'81.78',6), - -(10407,'S24_2887',59,'98.65',5), - -(10407,'S24_3191',13,'77.05',7), - -(10407,'S24_3432',43,'101.73',9), - -(10408,'S24_3969',15,'41.03',1), - -(10409,'S18_2325',6,'104.25',2), - -(10409,'S24_1937',61,'27.88',1), - -(10410,'S18_1342',65,'99.66',7), - -(10410,'S18_1367',44,'51.21',6), - -(10410,'S18_2795',56,'145.13',8), - -(10410,'S18_2949',47,'93.21',1), - -(10410,'S18_2957',53,'49.97',3), - -(10410,'S18_3136',34,'84.82',2), - -(10410,'S18_3320',44,'81.35',5), - -(10410,'S24_2022',31,'42.56',9), - -(10410,'S24_4258',50,'95.44',4), - -(10411,'S10_1949',23,'205.73',9), - -(10411,'S10_4962',27,'144.79',2), - -(10411,'S12_1666',40,'110.70',6), - -(10411,'S18_1097',27,'109.67',8), - -(10411,'S18_4600',46,'106.55',3), - -(10411,'S18_4668',35,'41.25',7), - -(10411,'S32_1268',26,'78.01',1), - -(10411,'S32_3522',27,'60.76',5), - -(10411,'S700_2824',34,'89.01',4), - -(10412,'S12_4473',54,'100.73',5), - -(10412,'S18_2238',41,'150.63',4), - -(10412,'S18_2319',56,'120.28',8), - -(10412,'S18_2432',47,'49.83',11), - -(10412,'S18_3232',60,'157.49',9), - -(10412,'S24_1444',21,'47.40',2), - -(10412,'S24_2300',70,'109.90',10), - 
-(10412,'S24_2840',30,'32.88',6), - -(10412,'S24_4048',31,'108.82',1), - -(10412,'S32_2509',19,'50.86',7), - -(10412,'S50_1392',26,'105.33',3), - -(10413,'S12_1108',36,'201.57',2), - -(10413,'S12_3148',47,'145.04',3), - -(10413,'S12_3891',22,'173.02',1), - -(10413,'S18_4027',49,'133.57',5), - -(10413,'S32_3207',24,'56.55',6), - -(10413,'S50_1514',51,'53.31',4), - -(10414,'S10_4757',49,'114.24',3), - -(10414,'S18_3029',44,'77.42',1), - -(10414,'S18_3140',41,'128.39',12), - -(10414,'S18_3259',48,'85.71',14), - -(10414,'S18_4522',56,'83.38',11), - -(10414,'S24_2011',43,'108.14',10), - -(10414,'S24_3151',60,'72.58',5), - -(10414,'S24_3816',51,'72.96',2), - -(10414,'S700_1138',37,'62.00',6), - -(10414,'S700_1938',34,'74.48',13), - -(10414,'S700_2610',31,'61.44',4), - -(10414,'S700_3505',28,'84.14',7), - -(10414,'S700_3962',40,'84.41',8), - -(10414,'S72_3212',47,'54.60',9), - -(10415,'S18_3856',51,'86.81',5), - -(10415,'S24_2841',21,'60.97',1), - -(10415,'S24_3420',18,'59.83',2), - -(10415,'S700_2047',32,'73.32',4), - -(10415,'S72_1253',42,'43.20',3), - -(10416,'S18_1662',24,'129.31',14), - -(10416,'S18_2581',15,'70.96',4), - -(10416,'S24_1785',47,'90.82',6), - -(10416,'S24_2000',32,'62.46',1), - -(10416,'S24_3949',18,'64.83',13), - -(10416,'S24_4278',48,'70.28',5), - -(10416,'S32_1374',45,'86.90',2), - -(10416,'S32_4289',26,'68.10',7), - -(10416,'S50_1341',37,'39.71',8), - -(10416,'S700_1691',23,'88.60',9), - -(10416,'S700_2466',22,'84.76',11), - -(10416,'S700_2834',41,'98.48',3), - -(10416,'S700_3167',39,'65.60',10), - -(10416,'S700_4002',43,'63.67',12), - -(10417,'S10_1678',66,'79.43',2), - -(10417,'S10_2016',45,'116.56',5), - -(10417,'S10_4698',56,'162.67',4), - -(10417,'S12_2823',21,'144.60',1), - -(10417,'S18_2625',36,'58.75',6), - -(10417,'S24_1578',35,'109.32',3), - -(10418,'S18_3278',16,'70.76',2), - -(10418,'S18_3482',27,'139.64',1), - -(10418,'S18_3782',33,'56.57',5), - -(10418,'S18_4721',28,'120.53',4), - -(10418,'S24_2360',52,'64.41',8), - 
-(10418,'S24_4620',10,'66.29',3), - -(10418,'S32_2206',43,'36.61',6), - -(10418,'S32_4485',50,'100.01',9), - -(10418,'S50_4713',40,'72.41',7), - -(10419,'S12_1099',12,'182.90',13), - -(10419,'S12_3380',10,'111.57',11), - -(10419,'S12_3990',34,'64.64',14), - -(10419,'S12_4675',32,'99.04',10), - -(10419,'S18_1129',38,'117.48',5), - -(10419,'S18_1589',37,'100.80',1), - -(10419,'S18_1889',39,'67.76',9), - -(10419,'S18_1984',34,'133.72',4), - -(10419,'S18_2870',55,'116.16',2), - -(10419,'S18_3232',35,'165.95',6), - -(10419,'S18_3685',43,'114.44',3), - -(10419,'S24_2972',15,'32.10',7), - -(10419,'S24_3371',55,'52.66',12), - -(10419,'S24_3856',70,'112.34',8), - -(10420,'S18_1749',37,'153.00',5), - -(10420,'S18_2248',36,'52.06',4), - -(10420,'S18_2325',45,'116.96',2), - -(10420,'S18_4409',66,'73.62',6), - -(10420,'S18_4933',36,'68.42',7), - -(10420,'S24_1046',60,'60.26',11), - -(10420,'S24_1628',37,'48.80',13), - -(10420,'S24_1937',45,'32.19',1), - -(10420,'S24_2766',39,'76.33',9), - -(10420,'S24_2887',55,'115.09',8), - -(10420,'S24_3191',35,'77.05',10), - -(10420,'S24_3432',26,'104.94',12), - -(10420,'S24_3969',15,'35.29',3), - -(10421,'S18_2795',35,'167.06',1), - -(10421,'S24_2022',40,'44.80',2), - -(10422,'S18_1342',51,'91.44',2), - -(10422,'S18_1367',25,'47.44',1), - -(10423,'S18_2949',10,'89.15',1), - -(10423,'S18_2957',31,'56.21',3), - -(10423,'S18_3136',21,'98.44',2), - -(10423,'S18_3320',21,'80.36',5), - -(10423,'S24_4258',28,'78.89',4), - -(10424,'S10_1949',50,'201.44',6), - -(10424,'S12_1666',49,'121.64',3), - -(10424,'S18_1097',54,'108.50',5), - -(10424,'S18_4668',26,'40.25',4), - -(10424,'S32_3522',44,'54.94',2), - -(10424,'S700_2824',46,'85.98',1), - -(10425,'S10_4962',38,'131.49',12), - -(10425,'S12_4473',33,'95.99',4), - -(10425,'S18_2238',28,'147.36',3), - -(10425,'S18_2319',38,'117.82',7), - -(10425,'S18_2432',19,'48.62',10), - -(10425,'S18_3232',28,'140.55',8), - -(10425,'S18_4600',38,'107.76',13), - -(10425,'S24_1444',55,'53.75',1), - 
-(10425,'S24_2300',49,'127.79',9), - -(10425,'S24_2840',31,'31.82',5), - -(10425,'S32_1268',41,'83.79',11), - -(10425,'S32_2509',11,'50.32',6), - -(10425,'S50_1392',18,'94.92',2); - -/*Table structure for table `orders` */ - -DROP TABLE IF EXISTS `orders`; - -CREATE TABLE `orders` ( - `orderNumber` int(11) NOT NULL, - `orderDate` date NOT NULL, - `requiredDate` date NOT NULL, - `shippedDate` date DEFAULT NULL, - `status` varchar(15) NOT NULL, - `comments` text, - `customerNumber` int(11) NOT NULL, - PRIMARY KEY (`orderNumber`), - KEY `customerNumber` (`customerNumber`), - CONSTRAINT `orders_ibfk_1` FOREIGN KEY (`customerNumber`) REFERENCES `customers` (`customerNumber`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `orders` */ - -insert into `orders`(`orderNumber`,`orderDate`,`requiredDate`,`shippedDate`,`status`,`comments`,`customerNumber`) values - -(10100,'2003-01-06','2003-01-13','2003-01-10','Shipped',NULL,363), - -(10101,'2003-01-09','2003-01-18','2003-01-11','Shipped','Check on availability.',128), - -(10102,'2003-01-10','2003-01-18','2003-01-14','Shipped',NULL,181), - -(10103,'2003-01-29','2003-02-07','2003-02-02','Shipped',NULL,121), - -(10104,'2003-01-31','2003-02-09','2003-02-01','Shipped',NULL,141), - -(10105,'2003-02-11','2003-02-21','2003-02-12','Shipped',NULL,145), - -(10106,'2003-02-17','2003-02-24','2003-02-21','Shipped',NULL,278), - -(10107,'2003-02-24','2003-03-03','2003-02-26','Shipped','Difficult to negotiate with customer. 
We need more marketing materials',131), - -(10108,'2003-03-03','2003-03-12','2003-03-08','Shipped',NULL,385), - -(10109,'2003-03-10','2003-03-19','2003-03-11','Shipped','Customer requested that FedEx Ground is used for this shipping',486), - -(10110,'2003-03-18','2003-03-24','2003-03-20','Shipped',NULL,187), - -(10111,'2003-03-25','2003-03-31','2003-03-30','Shipped',NULL,129), - -(10112,'2003-03-24','2003-04-03','2003-03-29','Shipped','Customer requested that ad materials (such as posters, pamphlets) be included in the shippment',144), - -(10113,'2003-03-26','2003-04-02','2003-03-27','Shipped',NULL,124), - -(10114,'2003-04-01','2003-04-07','2003-04-02','Shipped',NULL,172), - -(10115,'2003-04-04','2003-04-12','2003-04-07','Shipped',NULL,424), - -(10116,'2003-04-11','2003-04-19','2003-04-13','Shipped',NULL,381), - -(10117,'2003-04-16','2003-04-24','2003-04-17','Shipped',NULL,148), - -(10118,'2003-04-21','2003-04-29','2003-04-26','Shipped','Customer has worked with some of our vendors in the past and is aware of their MSRP',216), - -(10119,'2003-04-28','2003-05-05','2003-05-02','Shipped',NULL,382), - -(10120,'2003-04-29','2003-05-08','2003-05-01','Shipped',NULL,114), - -(10121,'2003-05-07','2003-05-13','2003-05-13','Shipped',NULL,353), - -(10122,'2003-05-08','2003-05-16','2003-05-13','Shipped',NULL,350), - -(10123,'2003-05-20','2003-05-29','2003-05-22','Shipped',NULL,103), - -(10124,'2003-05-21','2003-05-29','2003-05-25','Shipped','Customer very concerned about the exact color of the models. There is high risk that he may dispute the order because there is a slight color mismatch',112), - -(10125,'2003-05-21','2003-05-27','2003-05-24','Shipped',NULL,114), - -(10126,'2003-05-28','2003-06-07','2003-06-02','Shipped',NULL,458), - -(10127,'2003-06-03','2003-06-09','2003-06-06','Shipped','Customer requested special shippment. 
The instructions were passed along to the warehouse',151), - -(10128,'2003-06-06','2003-06-12','2003-06-11','Shipped',NULL,141), - -(10129,'2003-06-12','2003-06-18','2003-06-14','Shipped',NULL,324), - -(10130,'2003-06-16','2003-06-24','2003-06-21','Shipped',NULL,198), - -(10131,'2003-06-16','2003-06-25','2003-06-21','Shipped',NULL,447), - -(10132,'2003-06-25','2003-07-01','2003-06-28','Shipped',NULL,323), - -(10133,'2003-06-27','2003-07-04','2003-07-03','Shipped',NULL,141), - -(10134,'2003-07-01','2003-07-10','2003-07-05','Shipped',NULL,250), - -(10135,'2003-07-02','2003-07-12','2003-07-03','Shipped',NULL,124), - -(10136,'2003-07-04','2003-07-14','2003-07-06','Shipped','Customer is interested in buying more Ferrari models',242), - -(10137,'2003-07-10','2003-07-20','2003-07-14','Shipped',NULL,353), - -(10138,'2003-07-07','2003-07-16','2003-07-13','Shipped',NULL,496), - -(10139,'2003-07-16','2003-07-23','2003-07-21','Shipped',NULL,282), - -(10140,'2003-07-24','2003-08-02','2003-07-30','Shipped',NULL,161), - -(10141,'2003-08-01','2003-08-09','2003-08-04','Shipped',NULL,334), - -(10142,'2003-08-08','2003-08-16','2003-08-13','Shipped',NULL,124), - -(10143,'2003-08-10','2003-08-18','2003-08-12','Shipped','Can we deliver the new Ford Mustang models by end-of-quarter?',320), - -(10144,'2003-08-13','2003-08-21','2003-08-14','Shipped',NULL,381), - -(10145,'2003-08-25','2003-09-02','2003-08-31','Shipped',NULL,205), - -(10146,'2003-09-03','2003-09-13','2003-09-06','Shipped',NULL,447), - -(10147,'2003-09-05','2003-09-12','2003-09-09','Shipped',NULL,379), - -(10148,'2003-09-11','2003-09-21','2003-09-15','Shipped','They want to reevaluate their terms agreement with Finance.',276), - -(10149,'2003-09-12','2003-09-18','2003-09-17','Shipped',NULL,487), - -(10150,'2003-09-19','2003-09-27','2003-09-21','Shipped','They want to reevaluate their terms agreement with Finance.',148), - -(10151,'2003-09-21','2003-09-30','2003-09-24','Shipped',NULL,311), - 
-(10152,'2003-09-25','2003-10-03','2003-10-01','Shipped',NULL,333), - -(10153,'2003-09-28','2003-10-05','2003-10-03','Shipped',NULL,141), - -(10154,'2003-10-02','2003-10-12','2003-10-08','Shipped',NULL,219), - -(10155,'2003-10-06','2003-10-13','2003-10-07','Shipped',NULL,186), - -(10156,'2003-10-08','2003-10-17','2003-10-11','Shipped',NULL,141), - -(10157,'2003-10-09','2003-10-15','2003-10-14','Shipped',NULL,473), - -(10158,'2003-10-10','2003-10-18','2003-10-15','Shipped',NULL,121), - -(10159,'2003-10-10','2003-10-19','2003-10-16','Shipped',NULL,321), - -(10160,'2003-10-11','2003-10-17','2003-10-17','Shipped',NULL,347), - -(10161,'2003-10-17','2003-10-25','2003-10-20','Shipped',NULL,227), - -(10162,'2003-10-18','2003-10-26','2003-10-19','Shipped',NULL,321), - -(10163,'2003-10-20','2003-10-27','2003-10-24','Shipped',NULL,424), - -(10164,'2003-10-21','2003-10-30','2003-10-23','Resolved','This order was disputed, but resolved on 11/1/2003; Customer doesn\'t like the colors and precision of the models.',452), - -(10165,'2003-10-22','2003-10-31','2003-12-26','Shipped','This order was on hold because customers\'s credit limit had been exceeded. Order will ship when payment is received',148), - -(10166,'2003-10-21','2003-10-30','2003-10-27','Shipped',NULL,462), - -(10167,'2003-10-23','2003-10-30',NULL,'Cancelled','Customer called to cancel. The warehouse was notified in time and the order didn\'t ship. They have a new VP of Sales and are shifting their sales model. Our VP of Sales should contact them.',448), - -(10168,'2003-10-28','2003-11-03','2003-11-01','Shipped',NULL,161), - -(10169,'2003-11-04','2003-11-14','2003-11-09','Shipped',NULL,276), - -(10170,'2003-11-04','2003-11-12','2003-11-07','Shipped',NULL,452), - -(10171,'2003-11-05','2003-11-13','2003-11-07','Shipped',NULL,233), - -(10172,'2003-11-05','2003-11-14','2003-11-11','Shipped',NULL,175), - -(10173,'2003-11-05','2003-11-15','2003-11-09','Shipped','Cautious optimism. 
We have happy customers here, if we can keep them well stocked. I need all the information I can get on the planned shippments of Porches',278), - -(10174,'2003-11-06','2003-11-15','2003-11-10','Shipped',NULL,333), - -(10175,'2003-11-06','2003-11-14','2003-11-09','Shipped',NULL,324), - -(10176,'2003-11-06','2003-11-15','2003-11-12','Shipped',NULL,386), - -(10177,'2003-11-07','2003-11-17','2003-11-12','Shipped',NULL,344), - -(10178,'2003-11-08','2003-11-16','2003-11-10','Shipped','Custom shipping instructions sent to warehouse',242), - -(10179,'2003-11-11','2003-11-17','2003-11-13','Cancelled','Customer cancelled due to urgent budgeting issues. Must be cautious when dealing with them in the future. Since order shipped already we must discuss who would cover the shipping charges.',496), - -(10180,'2003-11-11','2003-11-19','2003-11-14','Shipped',NULL,171), - -(10181,'2003-11-12','2003-11-19','2003-11-15','Shipped',NULL,167), - -(10182,'2003-11-12','2003-11-21','2003-11-18','Shipped',NULL,124), - -(10183,'2003-11-13','2003-11-22','2003-11-15','Shipped','We need to keep in close contact with their Marketing VP. He is the decision maker for all their purchases.',339), - -(10184,'2003-11-14','2003-11-22','2003-11-20','Shipped',NULL,484), - -(10185,'2003-11-14','2003-11-21','2003-11-20','Shipped',NULL,320), - -(10186,'2003-11-14','2003-11-20','2003-11-18','Shipped','They want to reevaluate their terms agreement with the VP of Sales',489), - -(10187,'2003-11-15','2003-11-24','2003-11-16','Shipped',NULL,211), - -(10188,'2003-11-18','2003-11-26','2003-11-24','Shipped',NULL,167), - -(10189,'2003-11-18','2003-11-25','2003-11-24','Shipped','They want to reevaluate their terms agreement with Finance.',205), - -(10190,'2003-11-19','2003-11-29','2003-11-20','Shipped',NULL,141), - -(10191,'2003-11-20','2003-11-30','2003-11-24','Shipped','We must be cautions with this customer. Their VP of Sales resigned. 
Company may be heading down.',259), - -(10192,'2003-11-20','2003-11-29','2003-11-25','Shipped',NULL,363), - -(10193,'2003-11-21','2003-11-28','2003-11-27','Shipped',NULL,471), - -(10194,'2003-11-25','2003-12-02','2003-11-26','Shipped',NULL,146), - -(10195,'2003-11-25','2003-12-01','2003-11-28','Shipped',NULL,319), - -(10196,'2003-11-26','2003-12-03','2003-12-01','Shipped',NULL,455), - -(10197,'2003-11-26','2003-12-02','2003-12-01','Shipped','Customer inquired about remote controlled models and gold models.',216), - -(10198,'2003-11-27','2003-12-06','2003-12-03','Shipped',NULL,385), - -(10199,'2003-12-01','2003-12-10','2003-12-06','Shipped',NULL,475), - -(10200,'2003-12-01','2003-12-09','2003-12-06','Shipped',NULL,211), - -(10201,'2003-12-01','2003-12-11','2003-12-02','Shipped',NULL,129), - -(10202,'2003-12-02','2003-12-09','2003-12-06','Shipped',NULL,357), - -(10203,'2003-12-02','2003-12-11','2003-12-07','Shipped',NULL,141), - -(10204,'2003-12-02','2003-12-10','2003-12-04','Shipped',NULL,151), - -(10205,'2003-12-03','2003-12-09','2003-12-07','Shipped',' I need all the information I can get on our competitors.',141), - -(10206,'2003-12-05','2003-12-13','2003-12-08','Shipped','Can we renegotiate this one?',202), - -(10207,'2003-12-09','2003-12-17','2003-12-11','Shipped','Check on availability.',495), - -(10208,'2004-01-02','2004-01-11','2004-01-04','Shipped',NULL,146), - -(10209,'2004-01-09','2004-01-15','2004-01-12','Shipped',NULL,347), - -(10210,'2004-01-12','2004-01-22','2004-01-20','Shipped',NULL,177), - -(10211,'2004-01-15','2004-01-25','2004-01-18','Shipped',NULL,406), - -(10212,'2004-01-16','2004-01-24','2004-01-18','Shipped',NULL,141), - -(10213,'2004-01-22','2004-01-28','2004-01-27','Shipped','Difficult to negotiate with customer. 
We need more marketing materials',489), - -(10214,'2004-01-26','2004-02-04','2004-01-29','Shipped',NULL,458), - -(10215,'2004-01-29','2004-02-08','2004-02-01','Shipped','Customer requested that FedEx Ground is used for this shipping',475), - -(10216,'2004-02-02','2004-02-10','2004-02-04','Shipped',NULL,256), - -(10217,'2004-02-04','2004-02-14','2004-02-06','Shipped',NULL,166), - -(10218,'2004-02-09','2004-02-16','2004-02-11','Shipped','Customer requested that ad materials (such as posters, pamphlets) be included in the shippment',473), - -(10219,'2004-02-10','2004-02-17','2004-02-12','Shipped',NULL,487), - -(10220,'2004-02-12','2004-02-19','2004-02-16','Shipped',NULL,189), - -(10221,'2004-02-18','2004-02-26','2004-02-19','Shipped',NULL,314), - -(10222,'2004-02-19','2004-02-27','2004-02-20','Shipped',NULL,239), - -(10223,'2004-02-20','2004-02-29','2004-02-24','Shipped',NULL,114), - -(10224,'2004-02-21','2004-03-02','2004-02-26','Shipped','Customer has worked with some of our vendors in the past and is aware of their MSRP',171), - -(10225,'2004-02-22','2004-03-01','2004-02-24','Shipped',NULL,298), - -(10226,'2004-02-26','2004-03-06','2004-03-02','Shipped',NULL,239), - -(10227,'2004-03-02','2004-03-12','2004-03-08','Shipped',NULL,146), - -(10228,'2004-03-10','2004-03-18','2004-03-13','Shipped',NULL,173), - -(10229,'2004-03-11','2004-03-20','2004-03-12','Shipped',NULL,124), - -(10230,'2004-03-15','2004-03-24','2004-03-20','Shipped','Customer very concerned about the exact color of the models. There is high risk that he may dispute the order because there is a slight color mismatch',128), - -(10231,'2004-03-19','2004-03-26','2004-03-25','Shipped',NULL,344), - -(10232,'2004-03-20','2004-03-30','2004-03-25','Shipped',NULL,240), - -(10233,'2004-03-29','2004-04-04','2004-04-02','Shipped','Customer requested special shippment. 
The instructions were passed along to the warehouse',328), - -(10234,'2004-03-30','2004-04-05','2004-04-02','Shipped',NULL,412), - -(10235,'2004-04-02','2004-04-12','2004-04-06','Shipped',NULL,260), - -(10236,'2004-04-03','2004-04-11','2004-04-08','Shipped',NULL,486), - -(10237,'2004-04-05','2004-04-12','2004-04-10','Shipped',NULL,181), - -(10238,'2004-04-09','2004-04-16','2004-04-10','Shipped',NULL,145), - -(10239,'2004-04-12','2004-04-21','2004-04-17','Shipped',NULL,311), - -(10240,'2004-04-13','2004-04-20','2004-04-20','Shipped',NULL,177), - -(10241,'2004-04-13','2004-04-20','2004-04-19','Shipped',NULL,209), - -(10242,'2004-04-20','2004-04-28','2004-04-25','Shipped','Customer is interested in buying more Ferrari models',456), - -(10243,'2004-04-26','2004-05-03','2004-04-28','Shipped',NULL,495), - -(10244,'2004-04-29','2004-05-09','2004-05-04','Shipped',NULL,141), - -(10245,'2004-05-04','2004-05-12','2004-05-09','Shipped',NULL,455), - -(10246,'2004-05-05','2004-05-13','2004-05-06','Shipped',NULL,141), - -(10247,'2004-05-05','2004-05-11','2004-05-08','Shipped',NULL,334), - -(10248,'2004-05-07','2004-05-14',NULL,'Cancelled','Order was mistakenly placed. The warehouse noticed the lack of documentation.',131), - -(10249,'2004-05-08','2004-05-17','2004-05-11','Shipped','Can we deliver the new Ford Mustang models by end-of-quarter?',173), - -(10250,'2004-05-11','2004-05-19','2004-05-15','Shipped',NULL,450), - -(10251,'2004-05-18','2004-05-24','2004-05-24','Shipped',NULL,328), - -(10252,'2004-05-26','2004-06-04','2004-05-29','Shipped',NULL,406), - -(10253,'2004-06-01','2004-06-09','2004-06-02','Cancelled','Customer disputed the order and we agreed to cancel it. We must be more cautions with this customer going forward, since they are very hard to please. 
We must cover the shipping fees.',201), - -(10254,'2004-06-03','2004-06-13','2004-06-04','Shipped','Customer requested that DHL is used for this shipping',323), - -(10255,'2004-06-04','2004-06-12','2004-06-09','Shipped',NULL,209), - -(10256,'2004-06-08','2004-06-16','2004-06-10','Shipped',NULL,145), - -(10257,'2004-06-14','2004-06-24','2004-06-15','Shipped',NULL,450), - -(10258,'2004-06-15','2004-06-25','2004-06-23','Shipped',NULL,398), - -(10259,'2004-06-15','2004-06-22','2004-06-17','Shipped',NULL,166), - -(10260,'2004-06-16','2004-06-22',NULL,'Cancelled','Customer heard complaints from their customers and called to cancel this order. Will notify the Sales Manager.',357), - -(10261,'2004-06-17','2004-06-25','2004-06-22','Shipped',NULL,233), - -(10262,'2004-06-24','2004-07-01',NULL,'Cancelled','This customer found a better offer from one of our competitors. Will call back to renegotiate.',141), - -(10263,'2004-06-28','2004-07-04','2004-07-02','Shipped',NULL,175), - -(10264,'2004-06-30','2004-07-06','2004-07-01','Shipped','Customer will send a truck to our local warehouse on 7/1/2004',362), - -(10265,'2004-07-02','2004-07-09','2004-07-07','Shipped',NULL,471), - -(10266,'2004-07-06','2004-07-14','2004-07-10','Shipped',NULL,386), - -(10267,'2004-07-07','2004-07-17','2004-07-09','Shipped',NULL,151), - -(10268,'2004-07-12','2004-07-18','2004-07-14','Shipped',NULL,412), - -(10269,'2004-07-16','2004-07-22','2004-07-18','Shipped',NULL,382), - -(10270,'2004-07-19','2004-07-27','2004-07-24','Shipped','Can we renegotiate this one?',282), - -(10271,'2004-07-20','2004-07-29','2004-07-23','Shipped',NULL,124), - -(10272,'2004-07-20','2004-07-26','2004-07-22','Shipped',NULL,157), - -(10273,'2004-07-21','2004-07-28','2004-07-22','Shipped',NULL,314), - -(10274,'2004-07-21','2004-07-29','2004-07-22','Shipped',NULL,379), - -(10275,'2004-07-23','2004-08-02','2004-07-29','Shipped',NULL,119), - -(10276,'2004-08-02','2004-08-11','2004-08-08','Shipped',NULL,204), - 
-(10277,'2004-08-04','2004-08-12','2004-08-05','Shipped',NULL,148), - -(10278,'2004-08-06','2004-08-16','2004-08-09','Shipped',NULL,112), - -(10279,'2004-08-09','2004-08-19','2004-08-15','Shipped','Cautious optimism. We have happy customers here, if we can keep them well stocked. I need all the information I can get on the planned shippments of Porches',141), - -(10280,'2004-08-17','2004-08-27','2004-08-19','Shipped',NULL,249), - -(10281,'2004-08-19','2004-08-28','2004-08-23','Shipped',NULL,157), - -(10282,'2004-08-20','2004-08-26','2004-08-22','Shipped',NULL,124), - -(10283,'2004-08-20','2004-08-30','2004-08-23','Shipped',NULL,260), - -(10284,'2004-08-21','2004-08-29','2004-08-26','Shipped','Custom shipping instructions sent to warehouse',299), - -(10285,'2004-08-27','2004-09-04','2004-08-31','Shipped',NULL,286), - -(10286,'2004-08-28','2004-09-06','2004-09-01','Shipped',NULL,172), - -(10287,'2004-08-30','2004-09-06','2004-09-01','Shipped',NULL,298), - -(10288,'2004-09-01','2004-09-11','2004-09-05','Shipped',NULL,166), - -(10289,'2004-09-03','2004-09-13','2004-09-04','Shipped','We need to keep in close contact with their Marketing VP. He is the decision maker for all their purchases.',167), - -(10290,'2004-09-07','2004-09-15','2004-09-13','Shipped',NULL,198), - -(10291,'2004-09-08','2004-09-17','2004-09-14','Shipped',NULL,448), - -(10292,'2004-09-08','2004-09-18','2004-09-11','Shipped','They want to reevaluate their terms agreement with Finance.',131), - -(10293,'2004-09-09','2004-09-18','2004-09-14','Shipped',NULL,249), - -(10294,'2004-09-10','2004-09-17','2004-09-14','Shipped',NULL,204), - -(10295,'2004-09-10','2004-09-17','2004-09-14','Shipped','They want to reevaluate their terms agreement with Finance.',362), - -(10296,'2004-09-15','2004-09-22','2004-09-16','Shipped',NULL,415), - -(10297,'2004-09-16','2004-09-22','2004-09-21','Shipped','We must be cautions with this customer. Their VP of Sales resigned. 
Company may be heading down.',189), - -(10298,'2004-09-27','2004-10-05','2004-10-01','Shipped',NULL,103), - -(10299,'2004-09-30','2004-10-10','2004-10-01','Shipped',NULL,186), - -(10300,'2003-10-04','2003-10-13','2003-10-09','Shipped',NULL,128), - -(10301,'2003-10-05','2003-10-15','2003-10-08','Shipped',NULL,299), - -(10302,'2003-10-06','2003-10-16','2003-10-07','Shipped',NULL,201), - -(10303,'2004-10-06','2004-10-14','2004-10-09','Shipped','Customer inquired about remote controlled models and gold models.',484), - -(10304,'2004-10-11','2004-10-20','2004-10-17','Shipped',NULL,256), - -(10305,'2004-10-13','2004-10-22','2004-10-15','Shipped','Check on availability.',286), - -(10306,'2004-10-14','2004-10-21','2004-10-17','Shipped',NULL,187), - -(10307,'2004-10-14','2004-10-23','2004-10-20','Shipped',NULL,339), - -(10308,'2004-10-15','2004-10-24','2004-10-20','Shipped','Customer requested that FedEx Ground is used for this shipping',319), - -(10309,'2004-10-15','2004-10-24','2004-10-18','Shipped',NULL,121), - -(10310,'2004-10-16','2004-10-24','2004-10-18','Shipped',NULL,259), - -(10311,'2004-10-16','2004-10-23','2004-10-20','Shipped','Difficult to negotiate with customer. 
We need more marketing materials',141), - -(10312,'2004-10-21','2004-10-27','2004-10-23','Shipped',NULL,124), - -(10313,'2004-10-22','2004-10-28','2004-10-25','Shipped','Customer requested that FedEx Ground is used for this shipping',202), - -(10314,'2004-10-22','2004-11-01','2004-10-23','Shipped',NULL,227), - -(10315,'2004-10-29','2004-11-08','2004-10-30','Shipped',NULL,119), - -(10316,'2004-11-01','2004-11-09','2004-11-07','Shipped','Customer requested that ad materials (such as posters, pamphlets) be included in the shippment',240), - -(10317,'2004-11-02','2004-11-12','2004-11-08','Shipped',NULL,161), - -(10318,'2004-11-02','2004-11-09','2004-11-07','Shipped',NULL,157), - -(10319,'2004-11-03','2004-11-11','2004-11-06','Shipped','Customer requested that DHL is used for this shipping',456), - -(10320,'2004-11-03','2004-11-13','2004-11-07','Shipped',NULL,144), - -(10321,'2004-11-04','2004-11-12','2004-11-07','Shipped',NULL,462), - -(10322,'2004-11-04','2004-11-12','2004-11-10','Shipped','Customer has worked with some of our vendors in the past and is aware of their MSRP',363), - -(10323,'2004-11-05','2004-11-12','2004-11-09','Shipped',NULL,128), - -(10324,'2004-11-05','2004-11-11','2004-11-08','Shipped',NULL,181), - -(10325,'2004-11-05','2004-11-13','2004-11-08','Shipped',NULL,121), - -(10326,'2004-11-09','2004-11-16','2004-11-10','Shipped',NULL,144), - -(10327,'2004-11-10','2004-11-19','2004-11-13','Resolved','Order was disputed and resolved on 12/1/04. The Sales Manager was involved. Customer claims the scales of the models don\'t match what was discussed.',145), - -(10328,'2004-11-12','2004-11-21','2004-11-18','Shipped','Customer very concerned about the exact color of the models. 
There is high risk that he may dispute the order because there is a slight color mismatch',278), - -(10329,'2004-11-15','2004-11-24','2004-11-16','Shipped',NULL,131), - -(10330,'2004-11-16','2004-11-25','2004-11-21','Shipped',NULL,385), - -(10331,'2004-11-17','2004-11-23','2004-11-23','Shipped','Customer requested special shippment. The instructions were passed along to the warehouse',486), - -(10332,'2004-11-17','2004-11-25','2004-11-18','Shipped',NULL,187), - -(10333,'2004-11-18','2004-11-27','2004-11-20','Shipped',NULL,129), - -(10334,'2004-11-19','2004-11-28',NULL,'On Hold','The outstaniding balance for this customer exceeds their credit limit. Order will be shipped when a payment is received.',144), - -(10335,'2004-11-19','2004-11-29','2004-11-23','Shipped',NULL,124), - -(10336,'2004-11-20','2004-11-26','2004-11-24','Shipped','Customer requested that DHL is used for this shipping',172), - -(10337,'2004-11-21','2004-11-30','2004-11-26','Shipped',NULL,424), - -(10338,'2004-11-22','2004-12-02','2004-11-27','Shipped',NULL,381), - -(10339,'2004-11-23','2004-11-30','2004-11-30','Shipped',NULL,398), - -(10340,'2004-11-24','2004-12-01','2004-11-25','Shipped','Customer is interested in buying more Ferrari models',216), - -(10341,'2004-11-24','2004-12-01','2004-11-29','Shipped',NULL,382), - -(10342,'2004-11-24','2004-12-01','2004-11-29','Shipped',NULL,114), - -(10343,'2004-11-24','2004-12-01','2004-11-26','Shipped',NULL,353), - -(10344,'2004-11-25','2004-12-02','2004-11-29','Shipped',NULL,350), - -(10345,'2004-11-25','2004-12-01','2004-11-26','Shipped',NULL,103), - -(10346,'2004-11-29','2004-12-05','2004-11-30','Shipped',NULL,112), - -(10347,'2004-11-29','2004-12-07','2004-11-30','Shipped','Can we deliver the new Ford Mustang models by end-of-quarter?',114), - -(10348,'2004-11-01','2004-11-08','2004-11-05','Shipped',NULL,458), - -(10349,'2004-12-01','2004-12-07','2004-12-03','Shipped',NULL,151), - -(10350,'2004-12-02','2004-12-08','2004-12-05','Shipped',NULL,141), - 
-(10351,'2004-12-03','2004-12-11','2004-12-07','Shipped',NULL,324), - -(10352,'2004-12-03','2004-12-12','2004-12-09','Shipped',NULL,198), - -(10353,'2004-12-04','2004-12-11','2004-12-05','Shipped',NULL,447), - -(10354,'2004-12-04','2004-12-10','2004-12-05','Shipped',NULL,323), - -(10355,'2004-12-07','2004-12-14','2004-12-13','Shipped',NULL,141), - -(10356,'2004-12-09','2004-12-15','2004-12-12','Shipped',NULL,250), - -(10357,'2004-12-10','2004-12-16','2004-12-14','Shipped',NULL,124), - -(10358,'2004-12-10','2004-12-16','2004-12-16','Shipped','Customer requested that DHL is used for this shipping',141), - -(10359,'2004-12-15','2004-12-23','2004-12-18','Shipped',NULL,353), - -(10360,'2004-12-16','2004-12-22','2004-12-18','Shipped',NULL,496), - -(10361,'2004-12-17','2004-12-24','2004-12-20','Shipped',NULL,282), - -(10362,'2005-01-05','2005-01-16','2005-01-10','Shipped',NULL,161), - -(10363,'2005-01-06','2005-01-12','2005-01-10','Shipped',NULL,334), - -(10364,'2005-01-06','2005-01-17','2005-01-09','Shipped',NULL,350), - -(10365,'2005-01-07','2005-01-18','2005-01-11','Shipped',NULL,320), - -(10366,'2005-01-10','2005-01-19','2005-01-12','Shipped',NULL,381), - -(10367,'2005-01-12','2005-01-21','2005-01-16','Resolved','This order was disputed and resolved on 2/1/2005. Customer claimed that container with shipment was damaged. 
FedEx\'s investigation proved this wrong.',205), - -(10368,'2005-01-19','2005-01-27','2005-01-24','Shipped','Can we renegotiate this one?',124), - -(10369,'2005-01-20','2005-01-28','2005-01-24','Shipped',NULL,379), - -(10370,'2005-01-20','2005-02-01','2005-01-25','Shipped',NULL,276), - -(10371,'2005-01-23','2005-02-03','2005-01-25','Shipped',NULL,124), - -(10372,'2005-01-26','2005-02-05','2005-01-28','Shipped',NULL,398), - -(10373,'2005-01-31','2005-02-08','2005-02-06','Shipped',NULL,311), - -(10374,'2005-02-02','2005-02-09','2005-02-03','Shipped',NULL,333), - -(10375,'2005-02-03','2005-02-10','2005-02-06','Shipped',NULL,119), - -(10376,'2005-02-08','2005-02-18','2005-02-13','Shipped',NULL,219), - -(10377,'2005-02-09','2005-02-21','2005-02-12','Shipped','Cautious optimism. We have happy customers here, if we can keep them well stocked. I need all the information I can get on the planned shippments of Porches',186), - -(10378,'2005-02-10','2005-02-18','2005-02-11','Shipped',NULL,141), - -(10379,'2005-02-10','2005-02-18','2005-02-11','Shipped',NULL,141), - -(10380,'2005-02-16','2005-02-24','2005-02-18','Shipped',NULL,141), - -(10381,'2005-02-17','2005-02-25','2005-02-18','Shipped',NULL,321), - -(10382,'2005-02-17','2005-02-23','2005-02-18','Shipped','Custom shipping instructions sent to warehouse',124), - -(10383,'2005-02-22','2005-03-02','2005-02-25','Shipped',NULL,141), - -(10384,'2005-02-23','2005-03-06','2005-02-27','Shipped',NULL,321), - -(10385,'2005-02-28','2005-03-09','2005-03-01','Shipped',NULL,124), - -(10386,'2005-03-01','2005-03-09','2005-03-06','Resolved','Disputed then Resolved on 3/15/2005. Customer doesn\'t like the craftsmaship of the models.',141), - -(10387,'2005-03-02','2005-03-09','2005-03-06','Shipped','We need to keep in close contact with their Marketing VP. 
He is the decision maker for all their purchases.',148), - -(10388,'2005-03-03','2005-03-11','2005-03-09','Shipped',NULL,462), - -(10389,'2005-03-03','2005-03-09','2005-03-08','Shipped',NULL,448), - -(10390,'2005-03-04','2005-03-11','2005-03-07','Shipped','They want to reevaluate their terms agreement with Finance.',124), - -(10391,'2005-03-09','2005-03-20','2005-03-15','Shipped',NULL,276), - -(10392,'2005-03-10','2005-03-18','2005-03-12','Shipped',NULL,452), - -(10393,'2005-03-11','2005-03-22','2005-03-14','Shipped','They want to reevaluate their terms agreement with Finance.',323), - -(10394,'2005-03-15','2005-03-25','2005-03-19','Shipped',NULL,141), - -(10395,'2005-03-17','2005-03-24','2005-03-23','Shipped','We must be cautions with this customer. Their VP of Sales resigned. Company may be heading down.',250), - -(10396,'2005-03-23','2005-04-02','2005-03-28','Shipped',NULL,124), - -(10397,'2005-03-28','2005-04-09','2005-04-01','Shipped',NULL,242), - -(10398,'2005-03-30','2005-04-09','2005-03-31','Shipped',NULL,353), - -(10399,'2005-04-01','2005-04-12','2005-04-03','Shipped',NULL,496), - -(10400,'2005-04-01','2005-04-11','2005-04-04','Shipped','Customer requested that DHL is used for this shipping',450), - -(10401,'2005-04-03','2005-04-14',NULL,'On Hold','Customer credit limit exceeded. Will ship when a payment is received.',328), - -(10402,'2005-04-07','2005-04-14','2005-04-12','Shipped',NULL,406), - -(10403,'2005-04-08','2005-04-18','2005-04-11','Shipped',NULL,201), - -(10404,'2005-04-08','2005-04-14','2005-04-11','Shipped',NULL,323), - -(10405,'2005-04-14','2005-04-24','2005-04-20','Shipped',NULL,209), - -(10406,'2005-04-15','2005-04-25','2005-04-21','Disputed','Customer claims container with shipment was damaged during shipping and some items were missing. I am talking to FedEx about this.',145), - -(10407,'2005-04-22','2005-05-04',NULL,'On Hold','Customer credit limit exceeded. 
Will ship when a payment is received.',450), - -(10408,'2005-04-22','2005-04-29','2005-04-27','Shipped',NULL,398), - -(10409,'2005-04-23','2005-05-05','2005-04-24','Shipped',NULL,166), - -(10410,'2005-04-29','2005-05-10','2005-04-30','Shipped',NULL,357), - -(10411,'2005-05-01','2005-05-08','2005-05-06','Shipped',NULL,233), - -(10412,'2005-05-03','2005-05-13','2005-05-05','Shipped',NULL,141), - -(10413,'2005-05-05','2005-05-14','2005-05-09','Shipped','Customer requested that DHL is used for this shipping',175), - -(10414,'2005-05-06','2005-05-13',NULL,'On Hold','Customer credit limit exceeded. Will ship when a payment is received.',362), - -(10415,'2005-05-09','2005-05-20','2005-05-12','Disputed','Customer claims the scales of the models don\'t match what was discussed. I keep all the paperwork though to prove otherwise',471), - -(10416,'2005-05-10','2005-05-16','2005-05-14','Shipped',NULL,386), - -(10417,'2005-05-13','2005-05-19','2005-05-19','Disputed','Customer doesn\'t like the colors and precision of the models.',141), - -(10418,'2005-05-16','2005-05-24','2005-05-20','Shipped',NULL,412), - -(10419,'2005-05-17','2005-05-28','2005-05-19','Shipped',NULL,382), - -(10420,'2005-05-29','2005-06-07',NULL,'In Process',NULL,282), - -(10421,'2005-05-29','2005-06-06',NULL,'In Process','Custom shipping instructions were sent to warehouse',124), - -(10422,'2005-05-30','2005-06-11',NULL,'In Process',NULL,157), - -(10423,'2005-05-30','2005-06-05',NULL,'In Process',NULL,314), - -(10424,'2005-05-31','2005-06-08',NULL,'In Process',NULL,141), - -(10425,'2005-05-31','2005-06-07',NULL,'In Process',NULL,119); - -/*Table structure for table `payments` */ - -DROP TABLE IF EXISTS `payments`; - -CREATE TABLE `payments` ( - `customerNumber` int(11) NOT NULL, - `checkNumber` varchar(50) NOT NULL, - `paymentDate` date NOT NULL, - `amount` decimal(10,2) NOT NULL, - PRIMARY KEY (`customerNumber`,`checkNumber`), - CONSTRAINT `payments_ibfk_1` FOREIGN KEY (`customerNumber`) REFERENCES 
`customers` (`customerNumber`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `payments` */ - -insert into `payments`(`customerNumber`,`checkNumber`,`paymentDate`,`amount`) values - -(103,'HQ336336','2004-10-19','6066.78'), - -(103,'JM555205','2003-06-05','14571.44'), - -(103,'OM314933','2004-12-18','1676.14'), - -(112,'BO864823','2004-12-17','14191.12'), - -(112,'HQ55022','2003-06-06','32641.98'), - -(112,'ND748579','2004-08-20','33347.88'), - -(114,'GG31455','2003-05-20','45864.03'), - -(114,'MA765515','2004-12-15','82261.22'), - -(114,'NP603840','2003-05-31','7565.08'), - -(114,'NR27552','2004-03-10','44894.74'), - -(119,'DB933704','2004-11-14','19501.82'), - -(119,'LN373447','2004-08-08','47924.19'), - -(119,'NG94694','2005-02-22','49523.67'), - -(121,'DB889831','2003-02-16','50218.95'), - -(121,'FD317790','2003-10-28','1491.38'), - -(121,'KI831359','2004-11-04','17876.32'), - -(121,'MA302151','2004-11-28','34638.14'), - -(124,'AE215433','2005-03-05','101244.59'), - -(124,'BG255406','2004-08-28','85410.87'), - -(124,'CQ287967','2003-04-11','11044.30'), - -(124,'ET64396','2005-04-16','83598.04'), - -(124,'HI366474','2004-12-27','47142.70'), - -(124,'HR86578','2004-11-02','55639.66'), - -(124,'KI131716','2003-08-15','111654.40'), - -(124,'LF217299','2004-03-26','43369.30'), - -(124,'NT141748','2003-11-25','45084.38'), - -(128,'DI925118','2003-01-28','10549.01'), - -(128,'FA465482','2003-10-18','24101.81'), - -(128,'FH668230','2004-03-24','33820.62'), - -(128,'IP383901','2004-11-18','7466.32'), - -(129,'DM826140','2004-12-08','26248.78'), - -(129,'ID449593','2003-12-11','23923.93'), - -(129,'PI42991','2003-04-09','16537.85'), - -(131,'CL442705','2003-03-12','22292.62'), - -(131,'MA724562','2004-12-02','50025.35'), - -(131,'NB445135','2004-09-11','35321.97'), - -(141,'AU364101','2003-07-19','36251.03'), - -(141,'DB583216','2004-11-01','36140.38'), - -(141,'DL460618','2005-05-19','46895.48'), - -(141,'HJ32686','2004-01-30','59830.55'), - 
-(141,'ID10962','2004-12-31','116208.40'), - -(141,'IN446258','2005-03-25','65071.26'), - -(141,'JE105477','2005-03-18','120166.58'), - -(141,'JN355280','2003-10-26','49539.37'), - -(141,'JN722010','2003-02-25','40206.20'), - -(141,'KT52578','2003-12-09','63843.55'), - -(141,'MC46946','2004-07-09','35420.74'), - -(141,'MF629602','2004-08-16','20009.53'), - -(141,'NU627706','2004-05-17','26155.91'), - -(144,'IR846303','2004-12-12','36005.71'), - -(144,'LA685678','2003-04-09','7674.94'), - -(145,'CN328545','2004-07-03','4710.73'), - -(145,'ED39322','2004-04-26','28211.70'), - -(145,'HR182688','2004-12-01','20564.86'), - -(145,'JJ246391','2003-02-20','53959.21'), - -(146,'FP549817','2004-03-18','40978.53'), - -(146,'FU793410','2004-01-16','49614.72'), - -(146,'LJ160635','2003-12-10','39712.10'), - -(148,'BI507030','2003-04-22','44380.15'), - -(148,'DD635282','2004-08-11','2611.84'), - -(148,'KM172879','2003-12-26','105743.00'), - -(148,'ME497970','2005-03-27','3516.04'), - -(151,'BF686658','2003-12-22','58793.53'), - -(151,'GB852215','2004-07-26','20314.44'), - -(151,'IP568906','2003-06-18','58841.35'), - -(151,'KI884577','2004-12-14','39964.63'), - -(157,'HI618861','2004-11-19','35152.12'), - -(157,'NN711988','2004-09-07','63357.13'), - -(161,'BR352384','2004-11-14','2434.25'), - -(161,'BR478494','2003-11-18','50743.65'), - -(161,'KG644125','2005-02-02','12692.19'), - -(161,'NI908214','2003-08-05','38675.13'), - -(166,'BQ327613','2004-09-16','38785.48'), - -(166,'DC979307','2004-07-07','44160.92'), - -(166,'LA318629','2004-02-28','22474.17'), - -(167,'ED743615','2004-09-19','12538.01'), - -(167,'GN228846','2003-12-03','85024.46'), - -(171,'GB878038','2004-03-15','18997.89'), - -(171,'IL104425','2003-11-22','42783.81'), - -(172,'AD832091','2004-09-09','1960.80'), - -(172,'CE51751','2004-12-04','51209.58'), - -(172,'EH208589','2003-04-20','33383.14'), - -(173,'GP545698','2004-05-13','11843.45'), - -(173,'IG462397','2004-03-29','20355.24'), - 
-(175,'CITI3434344','2005-05-19','28500.78'), - -(175,'IO448913','2003-11-19','24879.08'), - -(175,'PI15215','2004-07-10','42044.77'), - -(177,'AU750837','2004-04-17','15183.63'), - -(177,'CI381435','2004-01-19','47177.59'), - -(181,'CM564612','2004-04-25','22602.36'), - -(181,'GQ132144','2003-01-30','5494.78'), - -(181,'OH367219','2004-11-16','44400.50'), - -(186,'AE192287','2005-03-10','23602.90'), - -(186,'AK412714','2003-10-27','37602.48'), - -(186,'KA602407','2004-10-21','34341.08'), - -(187,'AM968797','2004-11-03','52825.29'), - -(187,'BQ39062','2004-12-08','47159.11'), - -(187,'KL124726','2003-03-27','48425.69'), - -(189,'BO711618','2004-10-03','17359.53'), - -(189,'NM916675','2004-03-01','32538.74'), - -(198,'FI192930','2004-12-06','9658.74'), - -(198,'HQ920205','2003-07-06','6036.96'), - -(198,'IS946883','2004-09-21','5858.56'), - -(201,'DP677013','2003-10-20','23908.24'), - -(201,'OO846801','2004-06-15','37258.94'), - -(202,'HI358554','2003-12-18','36527.61'), - -(202,'IQ627690','2004-11-08','33594.58'), - -(204,'GC697638','2004-08-13','51152.86'), - -(204,'IS150005','2004-09-24','4424.40'), - -(205,'GL756480','2003-12-04','3879.96'), - -(205,'LL562733','2003-09-05','50342.74'), - -(205,'NM739638','2005-02-06','39580.60'), - -(209,'BOAF82044','2005-05-03','35157.75'), - -(209,'ED520529','2004-06-21','4632.31'), - -(209,'PH785937','2004-05-04','36069.26'), - -(211,'BJ535230','2003-12-09','45480.79'), - -(216,'BG407567','2003-05-09','3101.40'), - -(216,'ML780814','2004-12-06','24945.21'), - -(216,'MM342086','2003-12-14','40473.86'), - -(219,'BN17870','2005-03-02','3452.75'), - -(219,'BR941480','2003-10-18','4465.85'), - -(227,'MQ413968','2003-10-31','36164.46'), - -(227,'NU21326','2004-11-02','53745.34'), - -(233,'BOFA23232','2005-05-20','29070.38'), - -(233,'II180006','2004-07-01','22997.45'), - -(233,'JG981190','2003-11-18','16909.84'), - -(239,'NQ865547','2004-03-15','80375.24'), - -(240,'IF245157','2004-11-16','46788.14'), - 
-(240,'JO719695','2004-03-28','24995.61'), - -(242,'AF40894','2003-11-22','33818.34'), - -(242,'HR224331','2005-06-03','12432.32'), - -(242,'KI744716','2003-07-21','14232.70'), - -(249,'IJ399820','2004-09-19','33924.24'), - -(249,'NE404084','2004-09-04','48298.99'), - -(250,'EQ12267','2005-05-17','17928.09'), - -(250,'HD284647','2004-12-30','26311.63'), - -(250,'HN114306','2003-07-18','23419.47'), - -(256,'EP227123','2004-02-10','5759.42'), - -(256,'HE84936','2004-10-22','53116.99'), - -(259,'EU280955','2004-11-06','61234.67'), - -(259,'GB361972','2003-12-07','27988.47'), - -(260,'IO164641','2004-08-30','37527.58'), - -(260,'NH776924','2004-04-24','29284.42'), - -(276,'EM979878','2005-02-09','27083.78'), - -(276,'KM841847','2003-11-13','38547.19'), - -(276,'LE432182','2003-09-28','41554.73'), - -(276,'OJ819725','2005-04-30','29848.52'), - -(278,'BJ483870','2004-12-05','37654.09'), - -(278,'GP636783','2003-03-02','52151.81'), - -(278,'NI983021','2003-11-24','37723.79'), - -(282,'IA793562','2003-08-03','24013.52'), - -(282,'JT819493','2004-08-02','35806.73'), - -(282,'OD327378','2005-01-03','31835.36'), - -(286,'DR578578','2004-10-28','47411.33'), - -(286,'KH910279','2004-09-05','43134.04'), - -(298,'AJ574927','2004-03-13','47375.92'), - -(298,'LF501133','2004-09-18','61402.00'), - -(299,'AD304085','2003-10-24','36798.88'), - -(299,'NR157385','2004-09-05','32260.16'), - -(311,'DG336041','2005-02-15','46770.52'), - -(311,'FA728475','2003-10-06','32723.04'), - -(311,'NQ966143','2004-04-25','16212.59'), - -(314,'LQ244073','2004-08-09','45352.47'), - -(314,'MD809704','2004-03-03','16901.38'), - -(319,'HL685576','2004-11-06','42339.76'), - -(319,'OM548174','2003-12-07','36092.40'), - -(320,'GJ597719','2005-01-18','8307.28'), - -(320,'HO576374','2003-08-20','41016.75'), - -(320,'MU817160','2003-11-24','52548.49'), - -(321,'DJ15149','2003-11-03','85559.12'), - -(321,'LA556321','2005-03-15','46781.66'), - -(323,'AL493079','2005-05-23','75020.13'), - 
-(323,'ES347491','2004-06-24','37281.36'), - -(323,'HG738664','2003-07-05','2880.00'), - -(323,'PQ803830','2004-12-24','39440.59'), - -(324,'DQ409197','2004-12-13','13671.82'), - -(324,'FP443161','2003-07-07','29429.14'), - -(324,'HB150714','2003-11-23','37455.77'), - -(328,'EN930356','2004-04-16','7178.66'), - -(328,'NR631421','2004-05-30','31102.85'), - -(333,'HL209210','2003-11-15','23936.53'), - -(333,'JK479662','2003-10-17','9821.32'), - -(333,'NF959653','2005-03-01','21432.31'), - -(334,'CS435306','2005-01-27','45785.34'), - -(334,'HH517378','2003-08-16','29716.86'), - -(334,'LF737277','2004-05-22','28394.54'), - -(339,'AP286625','2004-10-24','23333.06'), - -(339,'DA98827','2003-11-28','34606.28'), - -(344,'AF246722','2003-11-24','31428.21'), - -(344,'NJ906924','2004-04-02','15322.93'), - -(347,'DG700707','2004-01-18','21053.69'), - -(347,'LG808674','2003-10-24','20452.50'), - -(350,'BQ602907','2004-12-11','18888.31'), - -(350,'CI471510','2003-05-25','50824.66'), - -(350,'OB648482','2005-01-29','1834.56'), - -(353,'CO351193','2005-01-10','49705.52'), - -(353,'ED878227','2003-07-21','13920.26'), - -(353,'GT878649','2003-05-21','16700.47'), - -(353,'HJ618252','2005-06-09','46656.94'), - -(357,'AG240323','2003-12-16','20220.04'), - -(357,'NB291497','2004-05-15','36442.34'), - -(362,'FP170292','2004-07-11','18473.71'), - -(362,'OG208861','2004-09-21','15059.76'), - -(363,'HL575273','2004-11-17','50799.69'), - -(363,'IS232033','2003-01-16','10223.83'), - -(363,'PN238558','2003-12-05','55425.77'), - -(379,'CA762595','2005-02-12','28322.83'), - -(379,'FR499138','2003-09-16','32680.31'), - -(379,'GB890854','2004-08-02','12530.51'), - -(381,'BC726082','2004-12-03','12081.52'), - -(381,'CC475233','2003-04-19','1627.56'), - -(381,'GB117430','2005-02-03','14379.90'), - -(381,'MS154481','2003-08-22','1128.20'), - -(382,'CC871084','2003-05-12','35826.33'), - -(382,'CT821147','2004-08-01','6419.84'), - -(382,'PH29054','2004-11-27','42813.83'), - 
-(385,'BN347084','2003-12-02','20644.24'), - -(385,'CP804873','2004-11-19','15822.84'), - -(385,'EK785462','2003-03-09','51001.22'), - -(386,'DO106109','2003-11-18','38524.29'), - -(386,'HG438769','2004-07-18','51619.02'), - -(398,'AJ478695','2005-02-14','33967.73'), - -(398,'DO787644','2004-06-21','22037.91'), - -(398,'JPMR4544','2005-05-18','615.45'), - -(398,'KB54275','2004-11-29','48927.64'), - -(406,'BJMPR4545','2005-04-23','12190.85'), - -(406,'HJ217687','2004-01-28','49165.16'), - -(406,'NA197101','2004-06-17','25080.96'), - -(412,'GH197075','2004-07-25','35034.57'), - -(412,'PJ434867','2004-04-14','31670.37'), - -(415,'ER54537','2004-09-28','31310.09'), - -(424,'KF480160','2004-12-07','25505.98'), - -(424,'LM271923','2003-04-16','21665.98'), - -(424,'OA595449','2003-10-31','22042.37'), - -(447,'AO757239','2003-09-15','6631.36'), - -(447,'ER615123','2003-06-25','17032.29'), - -(447,'OU516561','2004-12-17','26304.13'), - -(448,'FS299615','2005-04-18','27966.54'), - -(448,'KR822727','2004-09-30','48809.90'), - -(450,'EF485824','2004-06-21','59551.38'), - -(452,'ED473873','2003-11-15','27121.90'), - -(452,'FN640986','2003-11-20','15130.97'), - -(452,'HG635467','2005-05-03','8807.12'), - -(455,'HA777606','2003-12-05','38139.18'), - -(455,'IR662429','2004-05-12','32239.47'), - -(456,'GJ715659','2004-11-13','27550.51'), - -(456,'MO743231','2004-04-30','1679.92'), - -(458,'DD995006','2004-11-15','33145.56'), - -(458,'NA377824','2004-02-06','22162.61'), - -(458,'OO606861','2003-06-13','57131.92'), - -(462,'ED203908','2005-04-15','30293.77'), - -(462,'GC60330','2003-11-08','9977.85'), - -(462,'PE176846','2004-11-27','48355.87'), - -(471,'AB661578','2004-07-28','9415.13'), - -(471,'CO645196','2003-12-10','35505.63'), - -(473,'LL427009','2004-02-17','7612.06'), - -(473,'PC688499','2003-10-27','17746.26'), - -(475,'JP113227','2003-12-09','7678.25'), - -(475,'PB951268','2004-02-13','36070.47'), - -(484,'GK294076','2004-10-26','3474.66'), - 
-(484,'JH546765','2003-11-29','47513.19'), - -(486,'BL66528','2004-04-14','5899.38'), - -(486,'HS86661','2004-11-23','45994.07'), - -(486,'JB117768','2003-03-20','25833.14'), - -(487,'AH612904','2003-09-28','29997.09'), - -(487,'PT550181','2004-02-29','12573.28'), - -(489,'OC773849','2003-12-04','22275.73'), - -(489,'PO860906','2004-01-31','7310.42'), - -(495,'BH167026','2003-12-26','59265.14'), - -(495,'FN155234','2004-05-14','6276.60'), - -(496,'EU531600','2005-05-25','30253.75'), - -(496,'MB342426','2003-07-16','32077.44'), - -(496,'MN89921','2004-12-31','52166.00'); - -/*Table structure for table `productlines` */ - -DROP TABLE IF EXISTS `productlines`; - -CREATE TABLE `productlines` ( - `productLine` varchar(50) NOT NULL, - `textDescription` varchar(4000) DEFAULT NULL, - `htmlDescription` mediumtext, - `image` mediumblob, - PRIMARY KEY (`productLine`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `productlines` */ - -insert into `productlines`(`productLine`,`textDescription`,`htmlDescription`,`image`) values - -('Classic Cars','Attention car enthusiasts: Make your wildest car ownership dreams come true. Whether you are looking for classic muscle cars, dream sports cars or movie-inspired miniatures, you will find great choices in this category. These replicas feature superb attention to detail and craftsmanship and offer features such as working steering system, opening forward compartment, opening rear trunk with removable spare wheel, 4-wheel independent spring suspension, and so on. The models range in size from 1:10 to 1:24 scale and include numerous limited edition and several out-of-production vehicles. All models include a certificate of authenticity from their manufacturers and come fully assembled and ready for display in the home or office.',NULL,NULL), - -('Motorcycles','Our motorcycles are state of the art replicas of classic as well as contemporary motorcycle legends such as Harley Davidson, Ducati and Vespa. 
Models contain stunning details such as official logos, rotating wheels, working kickstand, front suspension, gear-shift lever, footbrake lever, and drive chain. Materials used include diecast and plastic. The models range in size from 1:10 to 1:50 scale and include numerous limited edition and several out-of-production vehicles. All models come fully assembled and ready for display in the home or office. Most include a certificate of authenticity.',NULL,NULL), - -('Planes','Unique, diecast airplane and helicopter replicas suitable for collections, as well as home, office or classroom decorations. Models contain stunning details such as official logos and insignias, rotating jet engines and propellers, retractable wheels, and so on. Most come fully assembled and with a certificate of authenticity from their manufacturers.',NULL,NULL), - -('Ships','The perfect holiday or anniversary gift for executives, clients, friends, and family. These handcrafted model ships are unique, stunning works of art that will be treasured for generations! They come fully assembled and ready for display in the home or office. We guarantee the highest quality, and best value.',NULL,NULL), - -('Trains','Model trains are a rewarding hobby for enthusiasts of all ages. Whether you\'re looking for collectible wooden trains, electric streetcars or locomotives, you\'ll find a number of great choices for any budget within this category. The interactive aspect of trains makes toy trains perfect for young children. The wooden train sets are ideal for children under the age of 5.',NULL,NULL), - -('Trucks and Buses','The Truck and Bus models are realistic replicas of buses and specialized trucks produced from the early 1920s to present. The models range in size from 1:12 to 1:50 scale and include numerous limited edition and several out-of-production vehicles. Materials used include tin, diecast and plastic. 
All models include a certificate of authenticity from their manufacturers and are a perfect ornament for the home and office.',NULL,NULL), - -('Vintage Cars','Our Vintage Car models realistically portray automobiles produced from the early 1900s through the 1940s. Materials used include Bakelite, diecast, plastic and wood. Most of the replicas are in the 1:18 and 1:24 scale sizes, which provide the optimum in detail and accuracy. Prices range from $30.00 up to $180.00 for some special limited edition replicas. All models include a certificate of authenticity from their manufacturers and come fully assembled and ready for display in the home or office.',NULL,NULL); - -/*Table structure for table `products` */ - -DROP TABLE IF EXISTS `products`; - -CREATE TABLE `products` ( - `productCode` varchar(15) NOT NULL, - `productName` varchar(70) NOT NULL, - `productLine` varchar(50) NOT NULL, - `productScale` varchar(10) NOT NULL, - `productVendor` varchar(50) NOT NULL, - `productDescription` text NOT NULL, - `quantityInStock` smallint(6) NOT NULL, - `buyPrice` decimal(10,2) NOT NULL, - `MSRP` decimal(10,2) NOT NULL, - PRIMARY KEY (`productCode`), - KEY `productLine` (`productLine`), - CONSTRAINT `products_ibfk_1` FOREIGN KEY (`productLine`) REFERENCES `productlines` (`productLine`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; - -/*Data for the table `products` */ - -insert into `products`(`productCode`,`productName`,`productLine`,`productScale`,`productVendor`,`productDescription`,`quantityInStock`,`buyPrice`,`MSRP`) values - -('S10_1678','1969 Harley Davidson Ultimate Chopper','Motorcycles','1:10','Min Lin Diecast','This replica features working kickstand, front suspension, gear-shift lever, footbrake lever, drive chain, wheels and steering. 
All parts are particularly delicate due to their precise scale and require special care and attention.',7933,'48.81','95.70'), - -('S10_1949','1952 Alpine Renault 1300','Classic Cars','1:10','Classic Metal Creations','Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; opening doors; and detailed chassis.',7305,'98.58','214.30'), - -('S10_2016','1996 Moto Guzzi 1100i','Motorcycles','1:10','Highway 66 Mini Classics','Official Moto Guzzi logos and insignias, saddle bags located on side of motorcycle, detailed engine, working steering, working suspension, two leather seats, luggage rack, dual exhaust pipes, small saddle bag located on handle bars, two-tone paint with chrome accents, superior die-cast detail , rotating wheels , working kick stand, diecast metal with plastic parts and baked enamel finish.',6625,'68.99','118.94'), - -('S10_4698','2003 Harley-Davidson Eagle Drag Bike','Motorcycles','1:10','Red Start Diecast','Model features, official Harley Davidson logos and insignias, detachable rear wheelie bar, heavy diecast metal with resin parts, authentic multi-color tampo-printed graphics, separate engine drive belts, free-turning front fork, rotating tires and rear racing slick, certificate of authenticity, detailed engine, display stand\r\n, precision diecast replica, baked enamel finish, 1:10 scale model, removable fender, seat and tank cover piece for displaying the superior detail of the v-twin engine',5582,'91.02','193.66'), - -('S10_4757','1972 Alfa Romeo GTA','Classic Cars','1:10','Motor City Art Classics','Features include: Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; opening doors; and detailed chassis.',3252,'85.68','136.00'), - -('S10_4962','1962 LanciaA Delta 16V','Classic Cars','1:10','Second Gear Diecast','Features include: Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; 
opening doors; and detailed chassis.',6791,'103.42','147.74'), - -('S12_1099','1968 Ford Mustang','Classic Cars','1:12','Autoart Studio Design','Hood, doors and trunk all open to reveal highly detailed interior features. Steering wheel actually turns the front wheels. Color dark green.',68,'95.34','194.57'), - -('S12_1108','2001 Ferrari Enzo','Classic Cars','1:12','Second Gear Diecast','Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; opening doors; and detailed chassis.',3619,'95.59','207.80'), - -('S12_1666','1958 Setra Bus','Trucks and Buses','1:12','Welly Diecast Productions','Model features 30 windows, skylights & glare resistant glass, working steering system, original logos',1579,'77.90','136.67'), - -('S12_2823','2002 Suzuki XREO','Motorcycles','1:12','Unimax Art Galleries','Official logos and insignias, saddle bags located on side of motorcycle, detailed engine, working steering, working suspension, two leather seats, luggage rack, dual exhaust pipes, small saddle bag located on handle bars, two-tone paint with chrome accents, superior die-cast detail , rotating wheels , working kick stand, diecast metal with plastic parts and baked enamel finish.',9997,'66.27','150.62'), - -('S12_3148','1969 Corvair Monza','Classic Cars','1:18','Welly Diecast Productions','1:18 scale die-cast about 10\" long doors open, hood opens, trunk opens and wheels roll',6906,'89.14','151.08'), - -('S12_3380','1968 Dodge Charger','Classic Cars','1:12','Welly Diecast Productions','1:12 scale model of a 1968 Dodge Charger. Hood, doors and trunk all open to reveal highly detailed interior features. Steering wheel actually turns the front wheels. 
Color black',9123,'75.16','117.44'), - -('S12_3891','1969 Ford Falcon','Classic Cars','1:12','Second Gear Diecast','Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; opening doors; and detailed chassis.',1049,'83.05','173.02'), - -('S12_3990','1970 Plymouth Hemi Cuda','Classic Cars','1:12','Studio M Art Models','Very detailed 1970 Plymouth Cuda model in 1:12 scale. The Cuda is generally accepted as one of the fastest original muscle cars from the 1970s. This model is a reproduction of one of the orginal 652 cars built in 1970. Red color.',5663,'31.92','79.80'), - -('S12_4473','1957 Chevy Pickup','Trucks and Buses','1:12','Exoto Designs','1:12 scale die-cast about 20\" long Hood opens, Rubber wheels',6125,'55.70','118.50'), - -('S12_4675','1969 Dodge Charger','Classic Cars','1:12','Welly Diecast Productions','Detailed model of the 1969 Dodge Charger. This model includes finely detailed interior and exterior features. Painted in red and white.',7323,'58.73','115.16'), - -('S18_1097','1940 Ford Pickup Truck','Trucks and Buses','1:18','Studio M Art Models','This model features soft rubber tires, working steering, rubber mud guards, authentic Ford logos, detailed undercarriage, opening doors and hood, removable split rear gate, full size spare mounted in bed, detailed interior with opening glove box',2613,'58.33','116.67'), - -('S18_1129','1993 Mazda RX-7','Classic Cars','1:18','Highway 66 Mini Classics','This model features, opening hood, opening doors, detailed engine, rear spoiler, opening trunk, working steering, tinted windows, baked enamel finish. Color red.',3975,'83.51','141.54'), - -('S18_1342','1937 Lincoln Berline','Vintage Cars','1:18','Motor City Art Classics','Features opening engine cover, doors, trunk, and fuel filler cap. 
Color black',8693,'60.62','102.74'), - -('S18_1367','1936 Mercedes-Benz 500K Special Roadster','Vintage Cars','1:18','Studio M Art Models','This 1:18 scale replica is constructed of heavy die-cast metal and has all the features of the original: working doors and rumble seat, independent spring suspension, detailed interior, working steering system, and a bifold hood that reveals an engine so accurate that it even includes the wiring. All this is topped off with a baked enamel finish. Color white.',8635,'24.26','53.91'), - -('S18_1589','1965 Aston Martin DB5','Classic Cars','1:18','Classic Metal Creations','Die-cast model of the silver 1965 Aston Martin DB5 in silver. This model includes full wire wheels and doors that open with fully detailed passenger compartment. In 1:18 scale, this model measures approximately 10 inches/20 cm long.',9042,'65.96','124.44'), - -('S18_1662','1980s Black Hawk Helicopter','Planes','1:18','Red Start Diecast','1:18 scale replica of actual Army\'s UH-60L BLACK HAWK Helicopter. 100% hand-assembled. 
Features rotating rotor blades, propeller blades and rubber wheels.',5330,'77.27','157.69'), - -('S18_1749','1917 Grand Touring Sedan','Vintage Cars','1:18','Welly Diecast Productions','This 1:18 scale replica of the 1917 Grand Touring car has all the features you would expect from museum quality reproductions: all four doors and bi-fold hood opening, detailed engine and instrument panel, chrome-look trim, and tufted upholstery, all topped off with a factory baked-enamel finish.',2724,'86.70','170.00'), - -('S18_1889','1948 Porsche 356-A Roadster','Classic Cars','1:18','Gearbox Collectibles','This precision die-cast replica features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',8826,'53.90','77.00'), - -('S18_1984','1995 Honda Civic','Classic Cars','1:18','Min Lin Diecast','This model features, opening hood, opening doors, detailed engine, rear spoiler, opening trunk, working steering, tinted windows, baked enamel finish. Color yellow.',9772,'93.89','142.25'), - -('S18_2238','1998 Chrysler Plymouth Prowler','Classic Cars','1:18','Gearbox Collectibles','Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; opening doors; and detailed chassis.',4724,'101.51','163.73'), - -('S18_2248','1911 Ford Town Car','Vintage Cars','1:18','Motor City Art Classics','Features opening hood, opening doors, opening trunk, wide white wall tires, front door arm rests, working steering system.',540,'33.30','60.54'), - -('S18_2319','1964 Mercedes Tour Bus','Trucks and Buses','1:18','Unimax Art Galleries','Exact replica. 100+ parts. 
working steering system, original logos',8258,'74.86','122.73'), - -('S18_2325','1932 Model A Ford J-Coupe','Vintage Cars','1:18','Autoart Studio Design','This model features grille-mounted chrome horn, lift-up louvered hood, fold-down rumble seat, working steering system, chrome-covered spare, opening doors, detailed and wired engine',9354,'58.48','127.13'), - -('S18_2432','1926 Ford Fire Engine','Trucks and Buses','1:18','Carousel DieCast Legends','Gleaming red handsome appearance. Everything is here the fire hoses, ladder, axes, bells, lanterns, ready to fight any inferno.',2018,'24.92','60.77'), - -('S18_2581','P-51-D Mustang','Planes','1:72','Gearbox Collectibles','Has retractable wheels and comes with a stand',992,'49.00','84.48'), - -('S18_2625','1936 Harley Davidson El Knucklehead','Motorcycles','1:18','Welly Diecast Productions','Intricately detailed with chrome accents and trim, official die-struck logos and baked enamel finish.',4357,'24.23','60.57'), - -('S18_2795','1928 Mercedes-Benz SSK','Vintage Cars','1:18','Gearbox Collectibles','This 1:18 replica features grille-mounted chrome horn, lift-up louvered hood, fold-down rumble seat, working steering system, chrome-covered spare, opening doors, detailed and wired engine. Color black.',548,'72.56','168.75'), - -('S18_2870','1999 Indy 500 Monte Carlo SS','Classic Cars','1:18','Red Start Diecast','Features include opening and closing doors. Color: Red',8164,'56.76','132.00'), - -('S18_2949','1913 Ford Model T Speedster','Vintage Cars','1:18','Carousel DieCast Legends','This 250 part reproduction includes moving handbrakes, clutch, throttle and foot pedals, squeezable horn, detailed wired engine, removable water, gas, and oil cans, pivoting monocle windshield, all topped with a baked enamel red finish. Each replica comes with an Owners Title and Certificate of Authenticity. 
Color red.',4189,'60.78','101.31'), - -('S18_2957','1934 Ford V8 Coupe','Vintage Cars','1:18','Min Lin Diecast','Chrome Trim, Chrome Grille, Opening Hood, Opening Doors, Opening Trunk, Detailed Engine, Working Steering System',5649,'34.35','62.46'), - -('S18_3029','1999 Yamaha Speed Boat','Ships','1:18','Min Lin Diecast','Exact replica. Wood and Metal. Many extras including rigging, long boats, pilot house, anchors, etc. Comes with three masts, all square-rigged.',4259,'51.61','86.02'), - -('S18_3136','18th Century Vintage Horse Carriage','Vintage Cars','1:18','Red Start Diecast','Hand crafted diecast-like metal horse carriage is re-created in about 1:18 scale of antique horse carriage. This antique style metal Stagecoach is all hand-assembled with many different parts.\r\n\r\nThis collectible metal horse carriage is painted in classic Red, and features turning steering wheel and is entirely hand-finished.',5992,'60.74','104.72'), - -('S18_3140','1903 Ford Model A','Vintage Cars','1:18','Unimax Art Galleries','Features opening trunk, working steering system',3913,'68.30','136.59'), - -('S18_3232','1992 Ferrari 360 Spider red','Classic Cars','1:18','Unimax Art Galleries','his replica features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',8347,'77.90','169.34'), - -('S18_3233','1985 Toyota Supra','Classic Cars','1:18','Highway 66 Mini Classics','This model features soft rubber tires, working steering, rubber mud guards, authentic Ford logos, detailed undercarriage, opening doors and hood, removable split rear gate, full size spare mounted in bed, detailed interior with opening glove box',7733,'57.01','107.57'), - -('S18_3259','Collectable Wooden Train','Trains','1:18','Carousel DieCast Legends','Hand crafted wooden toy train set is in about 1:18 scale, 25 inches in total length including 2 
additional carts, of actual vintage train. This antique style wooden toy train model set is all hand-assembled with 100% wood.',6450,'67.56','100.84'), - -('S18_3278','1969 Dodge Super Bee','Classic Cars','1:18','Min Lin Diecast','This replica features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',1917,'49.05','80.41'), - -('S18_3320','1917 Maxwell Touring Car','Vintage Cars','1:18','Exoto Designs','Features Gold Trim, Full Size Spare Tire, Chrome Trim, Chrome Grille, Opening Hood, Opening Doors, Opening Trunk, Detailed Engine, Working Steering System',7913,'57.54','99.21'), - -('S18_3482','1976 Ford Gran Torino','Classic Cars','1:18','Gearbox Collectibles','Highly detailed 1976 Ford Gran Torino \"Starsky and Hutch\" diecast model. Very well constructed and painted in red and white patterns.',9127,'73.49','146.99'), - -('S18_3685','1948 Porsche Type 356 Roadster','Classic Cars','1:18','Gearbox Collectibles','This model features working front and rear suspension on accurately replicated and actuating shock absorbers as well as opening engine cover, rear stabilizer flap, and 4 opening doors.',8990,'62.16','141.28'), - -('S18_3782','1957 Vespa GS150','Motorcycles','1:18','Studio M Art Models','Features rotating wheels , working kick stand. Comes with stand.',7689,'32.95','62.17'), - -('S18_3856','1941 Chevrolet Special Deluxe Cabriolet','Vintage Cars','1:18','Exoto Designs','Features opening hood, opening doors, opening trunk, wide white wall tires, front door arm rests, working steering system, leather upholstery. Color black.',2378,'64.58','105.87'), - -('S18_4027','1970 Triumph Spitfire','Classic Cars','1:18','Min Lin Diecast','Features include opening and closing doors. 
Color: White.',5545,'91.92','143.62'), - -('S18_4409','1932 Alfa Romeo 8C2300 Spider Sport','Vintage Cars','1:18','Exoto Designs','This 1:18 scale precision die cast replica features the 6 front headlights of the original, plus a detailed version of the 142 horsepower straight 8 engine, dual spares and their famous comprehensive dashboard. Color black.',6553,'43.26','92.03'), - -('S18_4522','1904 Buick Runabout','Vintage Cars','1:18','Exoto Designs','Features opening trunk, working steering system',8290,'52.66','87.77'), - -('S18_4600','1940s Ford truck','Trucks and Buses','1:18','Motor City Art Classics','This 1940s Ford Pick-Up truck is re-created in 1:18 scale of original 1940s Ford truck. This antique style metal 1940s Ford Flatbed truck is all hand-assembled. This collectible 1940\'s Pick-Up truck is painted in classic dark green color, and features rotating wheels.',3128,'84.76','121.08'), - -('S18_4668','1939 Cadillac Limousine','Vintage Cars','1:18','Studio M Art Models','Features completely detailed interior including Velvet flocked drapes,deluxe wood grain floor, and a wood grain casket with seperate chrome handles',6645,'23.14','50.31'), - -('S18_4721','1957 Corvette Convertible','Classic Cars','1:18','Classic Metal Creations','1957 die cast Corvette Convertible in Roman Red with white sides and whitewall tires. 1:18 scale quality die-cast with detailed engine and underbvody. Now you can own The Classic Corvette.',1249,'69.93','148.80'), - -('S18_4933','1957 Ford Thunderbird','Classic Cars','1:18','Studio M Art Models','This 1:18 scale precision die-cast replica, with its optional porthole hardtop and factory baked-enamel Thunderbird Bronze finish, is a 100% accurate rendition of this American classic.',3209,'34.21','71.27'), - -('S24_1046','1970 Chevy Chevelle SS 454','Classic Cars','1:24','Unimax Art Galleries','This model features rotating wheels, working streering system and opening doors. 
All parts are particularly delicate due to their precise scale and require special care and attention. It should not be picked up by the doors, roof, hood or trunk.',1005,'49.24','73.49'), - -('S24_1444','1970 Dodge Coronet','Classic Cars','1:24','Highway 66 Mini Classics','1:24 scale die-cast about 18\" long doors open, hood opens and rubber wheels',4074,'32.37','57.80'), - -('S24_1578','1997 BMW R 1100 S','Motorcycles','1:24','Autoart Studio Design','Detailed scale replica with working suspension and constructed from over 70 parts',7003,'60.86','112.70'), - -('S24_1628','1966 Shelby Cobra 427 S/C','Classic Cars','1:24','Carousel DieCast Legends','This diecast model of the 1966 Shelby Cobra 427 S/C includes many authentic details and operating parts. The 1:24 scale model of this iconic lighweight sports car from the 1960s comes in silver and it\'s own display case.',8197,'29.18','50.31'), - -('S24_1785','1928 British Royal Navy Airplane','Planes','1:24','Classic Metal Creations','Official logos and insignias',3627,'66.74','109.42'), - -('S24_1937','1939 Chevrolet Deluxe Coupe','Vintage Cars','1:24','Motor City Art Classics','This 1:24 scale die-cast replica of the 1939 Chevrolet Deluxe Coupe has the same classy look as the original. Features opening trunk, hood and doors and a showroom quality baked enamel finish.',7332,'22.57','33.19'), - -('S24_2000','1960 BSA Gold Star DBD34','Motorcycles','1:24','Highway 66 Mini Classics','Detailed scale replica with working suspension and constructed from over 70 parts',15,'37.32','76.17'), - -('S24_2011','18th century schooner','Ships','1:24','Carousel DieCast Legends','All wood with canvas sails. Many extras including rigging, long boats, pilot house, anchors, etc. 
Comes with 4 masts, all square-rigged.',1898,'82.34','122.89'), - -('S24_2022','1938 Cadillac V-16 Presidential Limousine','Vintage Cars','1:24','Classic Metal Creations','This 1:24 scale precision die cast replica of the 1938 Cadillac V-16 Presidential Limousine has all the details of the original, from the flags on the front to an opening back seat compartment complete with telephone and rifle. Features factory baked-enamel black finish, hood goddess ornament, working jump seats.',2847,'20.61','44.80'), - -('S24_2300','1962 Volkswagen Microbus','Trucks and Buses','1:24','Autoart Studio Design','This 1:18 scale die cast replica of the 1962 Microbus is loaded with features: A working steering system, opening front doors and tailgate, and famous two-tone factory baked enamel finish, are all topped of by the sliding, real fabric, sunroof.',2327,'61.34','127.79'), - -('S24_2360','1982 Ducati 900 Monster','Motorcycles','1:24','Highway 66 Mini Classics','Features two-tone paint with chrome accents, superior die-cast detail , rotating wheels , working kick stand',6840,'47.10','69.26'), - -('S24_2766','1949 Jaguar XK 120','Classic Cars','1:24','Classic Metal Creations','Precision-engineered from original Jaguar specification in perfect scale ratio. Features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',2350,'47.25','90.87'), - -('S24_2840','1958 Chevy Corvette Limited Edition','Classic Cars','1:24','Carousel DieCast Legends','The operating parts of this 1958 Chevy Corvette Limited Edition are particularly delicate due to their precise scale and require special care and attention. Features rotating wheels, working streering, opening doors and trunk. 
Color dark green.',2542,'15.91','35.36'), - -('S24_2841','1900s Vintage Bi-Plane','Planes','1:24','Autoart Studio Design','Hand crafted diecast-like metal bi-plane is re-created in about 1:24 scale of antique pioneer airplane. All hand-assembled with many different parts. Hand-painted in classic yellow and features correct markings of original airplane.',5942,'34.25','68.51'), - -('S24_2887','1952 Citroen-15CV','Classic Cars','1:24','Exoto Designs','Precision crafted hand-assembled 1:18 scale reproduction of the 1952 15CV, with its independent spring suspension, working steering system, opening doors and hood, detailed engine and instrument panel, all topped of with a factory fresh baked enamel finish.',1452,'72.82','117.44'), - -('S24_2972','1982 Lamborghini Diablo','Classic Cars','1:24','Second Gear Diecast','This replica features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',7723,'16.24','37.76'), - -('S24_3151','1912 Ford Model T Delivery Wagon','Vintage Cars','1:24','Min Lin Diecast','This model features chrome trim and grille, opening hood, opening doors, opening trunk, detailed engine, working steering system. Color white.',9173,'46.91','88.51'), - -('S24_3191','1969 Chevrolet Camaro Z28','Classic Cars','1:24','Exoto Designs','1969 Z/28 Chevy Camaro 1:24 scale replica. 
The operating parts of this limited edition 1:24 scale diecast model car 1969 Chevy Camaro Z28- hood, trunk, wheels, streering, suspension and doors- are particularly delicate due to their precise scale and require special care and attention.',4695,'50.51','85.61'), - -('S24_3371','1971 Alpine Renault 1600s','Classic Cars','1:24','Welly Diecast Productions','This 1971 Alpine Renault 1600s replica Features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',7995,'38.58','61.23'), - -('S24_3420','1937 Horch 930V Limousine','Vintage Cars','1:24','Autoart Studio Design','Features opening hood, opening doors, opening trunk, wide white wall tires, front door arm rests, working steering system',2902,'26.30','65.75'), - -('S24_3432','2002 Chevy Corvette','Classic Cars','1:24','Gearbox Collectibles','The operating parts of this limited edition Diecast 2002 Chevy Corvette 50th Anniversary Pace car Limited Edition are particularly delicate due to their precise scale and require special care and attention. Features rotating wheels, poseable streering, opening doors and trunk.',9446,'62.11','107.08'), - -('S24_3816','1940 Ford Delivery Sedan','Vintage Cars','1:24','Carousel DieCast Legends','Chrome Trim, Chrome Grille, Opening Hood, Opening Doors, Opening Trunk, Detailed Engine, Working Steering System. Color black.',6621,'48.64','83.86'), - -('S24_3856','1956 Porsche 356A Coupe','Classic Cars','1:18','Classic Metal Creations','Features include: Turnable front wheels; steering function; detailed interior; detailed engine; opening hood; opening trunk; opening doors; and detailed chassis.',6600,'98.30','140.43'), - -('S24_3949','Corsair F4U ( Bird Cage)','Planes','1:24','Second Gear Diecast','Has retractable wheels and comes with a stand. 
Official logos and insignias.',6812,'29.34','68.24'), - -('S24_3969','1936 Mercedes Benz 500k Roadster','Vintage Cars','1:24','Red Start Diecast','This model features grille-mounted chrome horn, lift-up louvered hood, fold-down rumble seat, working steering system and rubber wheels. Color black.',2081,'21.75','41.03'), - -('S24_4048','1992 Porsche Cayenne Turbo Silver','Classic Cars','1:24','Exoto Designs','This replica features opening doors, superb detail and craftsmanship, working steering system, opening forward compartment, opening rear trunk with removable spare, 4 wheel independent spring suspension as well as factory baked enamel finish.',6582,'69.78','118.28'), - -('S24_4258','1936 Chrysler Airflow','Vintage Cars','1:24','Second Gear Diecast','Features opening trunk, working steering system. Color dark green.',4710,'57.46','97.39'), - -('S24_4278','1900s Vintage Tri-Plane','Planes','1:24','Unimax Art Galleries','Hand crafted diecast-like metal Triplane is Re-created in about 1:24 scale of antique pioneer airplane. This antique style metal triplane is all hand-assembled with many different parts.',2756,'36.23','72.45'), - -('S24_4620','1961 Chevrolet Impala','Classic Cars','1:18','Classic Metal Creations','This 1:18 scale precision die-cast reproduction of the 1961 Chevrolet Impala has all the features-doors, hood and trunk that open; detailed 409 cubic-inch engine; chrome dashboard and stick shift, two-tone interior; working steering system; all topped of with a factory baked-enamel finish.',7869,'32.33','80.84'), - -('S32_1268','1980’s GM Manhattan Express','Trucks and Buses','1:32','Motor City Art Classics','This 1980’s era new look Manhattan express is still active, running from the Bronx to mid-town Manhattan. Has 35 opeining windows and working lights. Needs a battery.',5099,'53.93','96.31'), - -('S32_1374','1997 BMW F650 ST','Motorcycles','1:32','Exoto Designs','Features official die-struck logos and baked enamel finish. 
Comes with stand.',178,'66.92','99.89'), - -('S32_2206','1982 Ducati 996 R','Motorcycles','1:32','Gearbox Collectibles','Features rotating wheels , working kick stand. Comes with stand.',9241,'24.14','40.23'), - -('S32_2509','1954 Greyhound Scenicruiser','Trucks and Buses','1:32','Classic Metal Creations','Model features bi-level seating, 50 windows, skylights & glare resistant glass, working steering system, original logos',2874,'25.98','54.11'), - -('S32_3207','1950\'s Chicago Surface Lines Streetcar','Trains','1:32','Gearbox Collectibles','This streetcar is a joy to see. It has 80 separate windows, electric wire guides, detailed interiors with seats, poles and drivers controls, rolling and turning wheel assemblies, plus authentic factory baked-enamel finishes (Green Hornet for Chicago and Cream and Crimson for Boston).',8601,'26.72','62.14'), - -('S32_3522','1996 Peterbilt 379 Stake Bed with Outrigger','Trucks and Buses','1:32','Red Start Diecast','This model features, opening doors, detailed engine, working steering, tinted windows, detailed interior, die-struck logos, removable stakes operating outriggers, detachable second trailer, functioning 360-degree self loader, precision molded resin trailer and trim, baked enamel finish on cab',814,'33.61','64.64'), - -('S32_4289','1928 Ford Phaeton Deluxe','Vintage Cars','1:32','Highway 66 Mini Classics','This model features grille-mounted chrome horn, lift-up louvered hood, fold-down rumble seat, working steering system',136,'33.02','68.79'), - -('S32_4485','1974 Ducati 350 Mk3 Desmo','Motorcycles','1:32','Second Gear Diecast','This model features two-tone paint with chrome accents, superior die-cast detail , rotating wheels , working kick stand',3341,'56.13','102.05'), - -('S50_1341','1930 Buick Marquette Phaeton','Vintage Cars','1:50','Studio M Art Models','Features opening trunk, working steering system',7062,'27.06','43.64'), - -('S50_1392','Diamond T620 Semi-Skirted Tanker','Trucks and Buses','1:50','Highway 66 
Mini Classics','This limited edition model is licensed and perfectly scaled for Lionel Trains. The Diamond T620 has been produced in solid precision diecast and painted with a fire baked enamel finish. It comes with a removable tanker and is a perfect model to add authenticity to your static train or car layout or to just have on display.',1016,'68.29','115.75'), - -('S50_1514','1962 City of Detroit Streetcar','Trains','1:50','Classic Metal Creations','This streetcar is a joy to see. It has 99 separate windows, electric wire guides, detailed interiors with seats, poles and drivers controls, rolling and turning wheel assemblies, plus authentic factory baked-enamel finishes (Green Hornet for Chicago and Cream and Crimson for Boston).',1645,'37.49','58.58'), - -('S50_4713','2002 Yamaha YZR M1','Motorcycles','1:50','Autoart Studio Design','Features rotating wheels , working kick stand. Comes with stand.',600,'34.17','81.36'), - -('S700_1138','The Schooner Bluenose','Ships','1:700','Autoart Studio Design','All wood with canvas sails. Measures 31 1/2 inches in Length, 22 inches High and 4 3/4 inches Wide. Many extras.\r\nThe schooner Bluenose was built in Nova Scotia in 1921 to fish the rough waters off the coast of Newfoundland. Because of the Bluenose racing prowess she became the pride of all Canadians. Still featured on stamps and the Canadian dime, the Bluenose was lost off Haiti in 1946.',1897,'34.00','66.67'), - -('S700_1691','American Airlines: B767-300','Planes','1:700','Min Lin Diecast','Exact replia with official logos and insignias and retractable wheels',5841,'51.15','91.34'), - -('S700_1938','The Mayflower','Ships','1:700','Studio M Art Models','Measures 31 1/2 inches Long x 25 1/2 inches High x 10 5/8 inches Wide\r\nAll wood with canvas sail. 
Extras include long boats, rigging, ladders, railing, anchors, side cannons, hand painted, etc.',737,'43.30','86.61'), - -('S700_2047','HMS Bounty','Ships','1:700','Unimax Art Galleries','Measures 30 inches Long x 27 1/2 inches High x 4 3/4 inches Wide. \r\nMany extras including rigging, long boats, pilot house, anchors, etc. Comes with three masts, all square-rigged.',3501,'39.83','90.52'), - -('S700_2466','America West Airlines B757-200','Planes','1:700','Motor City Art Classics','Official logos and insignias. Working steering system. Rotating jet engines',9653,'68.80','99.72'), - -('S700_2610','The USS Constitution Ship','Ships','1:700','Red Start Diecast','All wood with canvas sails. Measures 31 1/2\" Length x 22 3/8\" High x 8 1/4\" Width. Extras include 4 boats on deck, sea sprite on bow, anchors, copper railing, pilot houses, etc.',7083,'33.97','72.28'), - -('S700_2824','1982 Camaro Z28','Classic Cars','1:18','Carousel DieCast Legends','Features include opening and closing doors. Color: White. \r\nMeasures approximately 9 1/2\" Long.',6934,'46.53','101.15'), - -('S700_2834','ATA: B757-300','Planes','1:700','Highway 66 Mini Classics','Exact replia with official logos and insignias and retractable wheels',7106,'59.33','118.65'), - -('S700_3167','F/A 18 Hornet 1/72','Planes','1:72','Motor City Art Classics','10\" Wingspan with retractable landing gears.Comes with pilot',551,'54.40','80.00'), - -('S700_3505','The Titanic','Ships','1:700','Carousel DieCast Legends','Completed model measures 19 1/2 inches long, 9 inches high, 3inches wide and is in barn red/black. All wood and metal.',1956,'51.09','100.17'), - -('S700_3962','The Queen Mary','Ships','1:700','Welly Diecast Productions','Exact replica. Wood and Metal. Many extras including rigging, long boats, pilot house, anchors, etc. Comes with three masts, all square-rigged.',5088,'53.63','99.31'), - -('S700_4002','American Airlines: MD-11S','Planes','1:700','Second Gear Diecast','Polished finish. 
Exact replia with official logos and insignias and retractable wheels',8820,'36.27','74.03'), - -('S72_1253','Boeing X-32A JSF','Planes','1:72','Motor City Art Classics','10\" Wingspan with retractable landing gears.Comes with pilot',4857,'32.77','49.66'), - -('S72_3212','Pont Yacht','Ships','1:72','Unimax Art Galleries','Measures 38 inches Long x 33 3/4 inches High. Includes a stand.\r\nMany extras including rigging, long boats, pilot house, anchors, etc. Comes with 2 masts, all square-rigged',414,'33.30','54.60'); - -/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; -/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; -/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; -/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; - diff --git a/database-files/inserts.sql b/database-files/inserts.sql new file mode 100644 index 0000000000..d79fce4a3a --- /dev/null +++ b/database-files/inserts.sql @@ -0,0 +1,161 @@ +Use BallWatch; + +-- Clear existing data to prevent duplicates +SET FOREIGN_KEY_CHECKS = 0; +DELETE FROM TeamsPlayers; +DELETE FROM DraftEvaluations; +DELETE FROM PlayerGameStats; +DELETE FROM PlayerMatchup; +DELETE FROM GamePlans; +DELETE FROM Game; +DELETE FROM Players; +DELETE FROM Teams; +DELETE FROM Agent; +DELETE FROM Users; +SET FOREIGN_KEY_CHECKS = 1; + +-- Reset auto-increment values +ALTER TABLE Users AUTO_INCREMENT = 1; +ALTER TABLE Agent AUTO_INCREMENT = 1; +ALTER TABLE Teams AUTO_INCREMENT = 1; +ALTER TABLE Players AUTO_INCREMENT = 1; +ALTER TABLE Game AUTO_INCREMENT = 1; +ALTER TABLE DraftEvaluations AUTO_INCREMENT = 1; +ALTER TABLE GamePlans AUTO_INCREMENT = 1; + +INSERT INTO Users (email, username, role) VALUES +('mike.lewis@ballwatch.com', 'mlewis', 'admin'), +('marcus.thompson@nets.com', 'mthompson', 'coach'), +('andre.wu@nets.com', 'awu', 'gm'), +('johnny.evans@gmail.com', 'jevans25', 'fan'); + + +INSERT INTO Agent (first_name, last_name, agency_name, phone, email) VALUES +('Rich', 'Paul', 'Klutch Sports Group', '555-0101', 'rich@klutchsports.com'), 
+('Jeff', 'Schwartz', 'Excel Sports Management', '555-0102', 'jeff@excelsports.com'), +('Mark', 'Bartelstein', 'Priority Sports', '555-0103', 'mark@prioritysports.com'); + + +INSERT INTO Teams (name, conference, division, coach, gm, offensive_system, defensive_system) VALUES +('Brooklyn Nets', 'Eastern', 'Atlantic', 'Marcus Thompson', 'Andre Wu', 'Motion Offense', 'Switch Everything'), +('Los Angeles Lakers', 'Western', 'Pacific', 'Darvin Ham', 'Rob Pelinka', 'LeBron System', 'Drop Coverage'), +('Golden State Warriors', 'Western', 'Pacific', 'Steve Kerr', 'Bob Myers', 'Motion Offense', 'Aggressive Switching'), +('Boston Celtics', 'Eastern', 'Atlantic', 'Joe Mazzulla', 'Brad Stevens', 'Five Out', 'Drop Coverage'), +('Miami Heat', 'Eastern', 'Southeast', 'Erik Spoelstra', 'Pat Riley', 'Zone Attack', 'Zone Defense'); + + +INSERT INTO Players (first_name, last_name, age, college, position, weight, player_status, agent_id, + height, DOB, years_exp, dominant_hand, expected_salary, player_type, current_salary, draft_year) VALUES +('Kevin', 'Durant', 35, 'Texas', 'SF', 240, 'Active', 1, '6-10', '1988-09-29', 16, 'Right', +51000000, 'Superstar', 47649433, 2007), +('Kyrie', 'Irving', 32, 'Duke', 'PG', 195, 'Active', 2, '6-2', '1992-03-23', 13, 'Right', +42000000, 'All-Star', 37037037, 2011), +('Ben', 'Simmons', 28, 'LSU', 'PF', 240, 'Active', 1, '6-10', '1996-07-20', 8, 'Left', +40000000, 'All-Star', 37893408, 2016), +('LeBron', 'James', 39, NULL, 'SF', 250, 'Active', 1, '6-9', '1984-12-30', 21, 'Right', +51000000, 'Superstar', 47607350, 2003), +('Stephen', 'Curry', 36, 'Davidson', 'PG', 185, 'Active', 2, '6-2', '1988-03-14', 15, 'Right', +55000000, 'Superstar', 51915615, 2009), +('Cooper', 'Flagg', 18, 'Duke', 'SF', 200, 'Active', 3, '6-9', '2006-12-21', 0, 'Right', +10000000, 'Rookie', 0, 2025), +('Jayson', 'Tatum', 26, 'Duke', 'SF', 210, 'Active', 2, '6-8', '1998-03-03', 7, 'Right', +35000000, 'All-Star', 32600060, 2017), +('Jimmy', 'Butler', 34, 'Marquette', 'SF', 230, 
'Active', 1, '6-7', '1989-09-14', 13, 'Right', +48000000, 'All-Star', 48798677, 2011); + + +INSERT INTO TeamsPlayers (player_id, team_id, joined_date, jersey_num) VALUES +(1, 1, '2023-02-09', 7), +(2, 1, '2023-02-06', 11), +(3, 1, '2022-02-10', 10), +(4, 2, '2018-07-01', 23), +(5, 3, '2009-06-25', 30), +(7, 4, '2017-06-22', 0), +(8, 5, '2019-07-06', 22); + + +INSERT INTO Game (game_date, season, game_type, home_team_id, away_team_id, home_score, away_score) VALUES +('2025-01-15', '2024-25', 'regular', 1, 2, 118, 112), +('2025-01-18', '2024-25', 'regular', 3, 1, 125, 120), +('2025-01-20', '2024-25', 'regular', 1, 3, 108, 115), +('2025-01-22', '2024-25', 'regular', 4, 5, 122, 118), +('2025-01-25', '2024-25', 'regular', 1, 4, 110, 105); + + +INSERT INTO LineupConfiguration (team_id, quarter, time_on, time_off, plus_minus, offensive_rating, defensive_rating) VALUES +(1, 1, '12:00:00', '06:00:00', 8, 118.5, 105.2), +(1, 2, '12:00:00', '05:30:00', -3, 102.3, 108.7), +(3, 1, '12:00:00', '07:00:00', 5, 115.2, 110.1), +(2, 1, '12:00:00', '06:30:00', 6, 112.5, 108.3); + + +INSERT INTO PlayerGameStats (player_id, game_id, points, rebounds, assists, shooting_percentage, + plus_minus, minutes_played, turnovers, steals, blocks) VALUES +(1, 1, 35, 8, 5, 0.58, 12, 38, 3, 1, 2), +(2, 1, 28, 4, 8, 0.52, 8, 36, 4, 2, 0), +(3, 1, 12, 6, 4, 0.33, 6, 24, 2, 0, 1), +(4, 1, 32, 10, 7, 0.55, -6, 37, 5, 1, 1), +(1, 2, 30, 6, 4, 0.48, -5, 35, 2, 0, 1), +(5, 2, 38, 5, 11, 0.62, 5, 36, 3, 3, 0), +(1, 3, 27, 9, 3, 0.45, -7, 34, 4, 1, 2), +(5, 3, 42, 4, 9, 0.68, 7, 38, 2, 2, 0); + + +INSERT INTO PlayerMatchup (game_id, offensive_player_id, defensive_player_id, offensive_rating, defensive_rating, + possessions, points_scored, shooting_percentage) VALUES +(1, 1, 4, 125.5, 98.3, 15, 18, 0.60), +(1, 4, 1, 118.2, 102.5, 12, 14, 0.54), +(2, 5, 2, 132.1, 95.2, 18, 22, 0.65), +(3, 1, 5, 108.5, 112.3, 16, 12, 0.40); + +INSERT INTO DraftEvaluations (player_id, overall_rating, offensive_rating, 
defensive_rating, athleticism_rating, potential_rating, evaluation_type, strengths, weaknesses, scout_notes, projected_round, comparison_player) VALUES +(1, 85.5, 82.0, 88.0, 90.0, 92.0, 'free_agent', 'Elite scorer with incredible range', 'Can be inconsistent on defense', 'Future Hall of Famer still playing at elite level', 1, 'Larry Bird'), +(2, 78.0, 85.0, 72.0, 75.0, 80.0, 'free_agent', 'Excellent ball handling and clutch gene', 'Can be a defensive liability', 'Elite offensive player when healthy', 1, 'Allen Iverson'), +(4, 82.0, 75.0, 90.0, 85.0, 78.0, 'free_agent', 'Best shooter of all time', 'Sometimes struggles with size', 'Revolutionary player who changed the game', 1, 'Ray Allen'); + + +INSERT INTO GamePlans (team_id, opponent_id, game_id, plan_name, offensive_strategy, defensive_strategy, special_instructions) VALUES +(1, 3, 3, 'Warriors Game Plan', 'Attack Curry in pick and roll. Post up KD against smaller defenders.', + 'Switch 1-4, drop big on Curry PnR. Force others to beat us.', + 'Double Curry on all side PnRs in clutch time.'), +(1, 4, 5, 'Celtics Game Plan', 'Run through KD in the post. Push pace in transition.', + 'Pack the paint against Tatum drives. 
Stay home on shooters.', + 'Hack-a-Simmons if game is close in final 2 minutes.'); + + +INSERT INTO KeyMatchups (matchup_text) VALUES +('KD vs LeBron - Battle of the forwards'), +('Kyrie vs Curry - Elite guard matchup'), +('Simmons vs Draymond - Defensive anchors'), +('Durant vs Tatum - Scoring duel'), +('Kyrie vs Smart - Crafty guard battle'); + + +INSERT INTO SystemHealth (service_name, error_rate_pct, avg_response_time, status) VALUES +('API Gateway', 0.02, 145.5, 'Healthy'), +('Database Cluster', 0.00, 23.2, 'Healthy'), +('Cache Layer', 0.15, 8.5, 'Warning'), +('Load Balancer', 0.01, 12.3, 'Healthy'), +('File Storage', 0.05, 156.7, 'Healthy'); + + +INSERT INTO DataLoads (load_type, status, started_at, completed_at, records_processed, records_failed, initiated_by, source_file) VALUES +('NBA_API', 'completed', '2025-01-20 02:00:00', '2025-01-20 02:15:30', 1250, 0, 'system', 'nba_daily_feed.json'), +('ESPN_Feed', 'completed', '2025-01-21 02:00:00', '2025-01-21 02:12:45', 980, 5, 'system', 'espn_stats.csv'), +('NBA_API', 'failed', '2025-01-22 02:00:00', '2025-01-22 02:05:15', 0, 500, 'system', 'nba_daily_feed.json'), +('Stats_API', 'completed', '2025-01-23 02:00:00', '2025-01-23 02:18:22', 1150, 8, 'system', 'advanced_stats.json'), +('NBA_API', 'completed', '2025-01-24 02:00:00', '2025-01-24 02:14:55', 1300, 2, 'system', 'nba_daily_feed.json'); + + +INSERT INTO ErrorLogs (error_type, severity, module, error_message, user_id) VALUES +('DataQuality', 'warning', 'DataValidation', 'Found 3 players with shooting percentage > 1.0', 1), +('APITimeout', 'error', 'DataIngestion', 'NBA API request timeout after 30 seconds', 1), +('DatabaseConnection', 'critical', 'SystemHealth', 'Lost connection to replica database', NULL), +('DataIntegrity', 'error', 'DataValidation', 'Duplicate game entries detected', 1), +('MemoryLimit', 'warning', 'CacheLayer', 'Cache memory limit exceeded', NULL); + + +INSERT INTO DataErrors (error_type, table_name, record_id, field_name, invalid_value, 
expected_format, detected_at) VALUES +('invalid', 'PlayerGameStats', '123', 'shooting_percentage', '1.25', 'Decimal between 0 and 1', '2025-08-03 14:30:00'), +('duplicate', 'Game', '456', 'game_id', '456', 'Unique identifier', '2025-08-13 09:15:00'), +('missing', 'Players', '789', 'position', NULL, 'Required enum value', '2025-08-08 16:45:00'); \ No newline at end of file diff --git a/database-files/ngo_db.sql b/database-files/ngo_db.sql deleted file mode 100644 index 526ba0070c..0000000000 --- a/database-files/ngo_db.sql +++ /dev/null @@ -1,63 +0,0 @@ -DROP DATABASE IF EXISTS ngo_database; -CREATE DATABASE IF NOT EXISTS ngo_database; - -USE ngo_database; - - -CREATE TABLE IF NOT EXISTS WorldNGOs ( - NGO_ID INT AUTO_INCREMENT PRIMARY KEY, - Name VARCHAR(255) NOT NULL, - Country VARCHAR(100) NOT NULL, - Founding_Year INTEGER, - Focus_Area VARCHAR(100), - Website VARCHAR(255) -); - -CREATE TABLE IF NOT EXISTS Projects ( - Project_ID INT AUTO_INCREMENT PRIMARY KEY, - Project_Name VARCHAR(255) NOT NULL, - Focus_Area VARCHAR(100), - Budget DECIMAL(15, 2), - NGO_ID INT, - Start_Date DATE, - End_Date DATE, - FOREIGN KEY (NGO_ID) REFERENCES WorldNGOs(NGO_ID) -); - -CREATE TABLE IF NOT EXISTS Donors ( - Donor_ID INT AUTO_INCREMENT PRIMARY KEY, - Donor_Name VARCHAR(255) NOT NULL, - Donor_Type ENUM('Individual', 'Organization') NOT NULL, - Donation_Amount DECIMAL(15, 2), - NGO_ID INT, - FOREIGN KEY (NGO_ID) REFERENCES WorldNGOs(NGO_ID) -); - -INSERT INTO WorldNGOs (Name, Country, Founding_Year, Focus_Area, Website) -VALUES -('World Wildlife Fund', 'United States', 1961, 'Environmental Conservation', 'https://www.worldwildlife.org'), -('Doctors Without Borders', 'France', 1971, 'Medical Relief', 'https://www.msf.org'), -('Oxfam International', 'United Kingdom', 1995, 'Poverty and Inequality', 'https://www.oxfam.org'), -('Amnesty International', 'United Kingdom', 1961, 'Human Rights', 'https://www.amnesty.org'), -('Save the Children', 'United States', 1919, 'Child Welfare', 
'https://www.savethechildren.org'), -('Greenpeace', 'Netherlands', 1971, 'Environmental Protection', 'https://www.greenpeace.org'), -('International Red Cross', 'Switzerland', 1863, 'Humanitarian Aid', 'https://www.icrc.org'), -('CARE International', 'Switzerland', 1945, 'Global Poverty', 'https://www.care-international.org'), -('Habitat for Humanity', 'United States', 1976, 'Affordable Housing', 'https://www.habitat.org'), -('Plan International', 'United Kingdom', 1937, 'Child Rights', 'https://plan-international.org'); - -INSERT INTO Projects (Project_Name, Focus_Area, Budget, NGO_ID, Start_Date, End_Date) -VALUES -('Save the Amazon', 'Environmental Conservation', 5000000.00, 1, '2022-01-01', '2024-12-31'), -('Emergency Medical Aid in Syria', 'Medical Relief', 3000000.00, 2, '2023-03-01', '2023-12-31'), -('Education for All', 'Poverty and Inequality', 2000000.00, 3, '2021-06-01', '2025-05-31'), -('Human Rights Advocacy in Asia', 'Human Rights', 1500000.00, 4, '2022-09-01', '2023-08-31'), -('Child Nutrition Program', 'Child Welfare', 2500000.00, 5, '2022-01-01', '2024-01-01'); - -INSERT INTO Donors (Donor_Name, Donor_Type, Donation_Amount, NGO_ID) -VALUES -('Bill & Melinda Gates Foundation', 'Organization', 10000000.00, 1), -('Elon Musk', 'Individual', 5000000.00, 2), -('Google.org', 'Organization', 2000000.00, 3), -('Open Society Foundations', 'Organization', 3000000.00, 4), -('Anonymous Philanthropist', 'Individual', 1000000.00, 5); \ No newline at end of file diff --git a/docker-compose-testing.yaml b/docker-compose-testing.yaml deleted file mode 100644 index 5b7ce4d694..0000000000 --- a/docker-compose-testing.yaml +++ /dev/null @@ -1,28 +0,0 @@ -name: project-app-testing -services: - app-test: - build: ./app - container_name: web-app-test - hostname: web-app - volumes: ["./app/src:/appcode"] - ports: - - 8502:8501 - - api-test: - build: ./api - container_name: web-api-test - hostname: web-api - volumes: ["./api:/apicode"] - ports: - - 4001:4000 - - db-test: - 
env_file: - - ./api/.env - image: mysql:9 - container_name: mysql-db-test - hostname: db - volumes: - - ./database-files:/docker-entrypoint-initdb.d/:ro - ports: - - 3201:3306