from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

advisoradvisee = Blueprint('advisoradvisee', __name__)


# Advisor identifies students with too few applications
@advisoradvisee.route('/advisor/<int:advisorID>/students/low-applications',
                      methods=['GET'])
def get_students_with_low_applications(advisorID):
    """Return the advisor's advisees with fewer than 5 applications.

    LEFT JOINs keep advisees with zero applications in the result;
    least-active students sort first.
    """
    current_app.logger.info(
        'GET /advisor/%s/students/low-applications route', advisorID)

    # Parameterized query: the original interpolated advisorID via
    # str.format(), which is an SQL-injection vector.
    query = '''
        SELECT u.userId,
               u.firstName,
               u.lastName,
               COUNT(apps.applicationId) AS totalApps
        FROM advisor_advisee aa
        JOIN users u ON u.userId = aa.studentId
        LEFT JOIN appliesToApp ata ON ata.studentId = u.userId
        LEFT JOIN applications apps ON ata.applicationId = apps.applicationId
        WHERE aa.advisorId = %s
        GROUP BY u.userId, u.firstName, u.lastName
        HAVING COUNT(apps.applicationId) < 5
        ORDER BY totalApps ASC, u.lastName;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (advisorID,))
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Admin reassigns students to different advisors as needed
@advisoradvisee.route('/admin/<int:advisorId>/<int:studentId>',
                      methods=['PUT'])
def reassignAdvisor(advisorId, studentId):
    """Point the advisor_advisee row for studentId at a new advisorId.

    Bug fix: the original assigned the whole ``request.json`` body to
    *both* ids; the ids now come from the two URL path segments the
    route declares.
    """
    current_app.logger.info('PUT /admin/%s/%s route', advisorId, studentId)

    query = '''
        UPDATE advisor_advisee
        SET advisorId = %s
        WHERE studentId = %s;
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query, (advisorId, studentId))
    db.get_db().commit()
    return 'advisor reassigned successfully'
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

# New Blueprint for applications
applications = Blueprint('applications', __name__)


# Student viewing their own application statuses
@applications.route('/student/<int:studentID>/applications', methods=['GET'])
def get_student_applications(studentID):
    """Return every application the student has submitted, newest first."""
    current_app.logger.info('GET /student/%s/applications route', studentID)

    query = '''
        SELECT u.userId,
               u.firstName,
               u.lastName,
               a.applicationId,
               a.status AS applicationStatus,
               cp.title AS positionTitle,
               cp.deadline AS applicationDeadline,
               com.name AS companyName,
               a.dateApplied,
               cp.description AS positionDescription
        FROM users u
        JOIN appliesToApp ata ON u.userId = ata.studentId
        JOIN applications a ON ata.applicationId = a.applicationId
        JOIN coopPositions cp ON a.coopPositionId = cp.coopPositionId
        JOIN companyProfiles com ON cp.companyProfileId = com.companyProfileId
        WHERE u.userId = %s
        ORDER BY a.dateApplied DESC, cp.deadline ASC
    '''

    cursor = db.get_db().cursor()
    # Bug fix: the query contains a %s placeholder but was executed with
    # no parameters, which fails at runtime.
    cursor.execute(query, (studentID,))
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Student sees how many positions they have applied to, per status
@applications.route('/student/<int:studentID>/applications/summary',
                    methods=['GET'])
def get_numb_apps(studentID):
    """Return a per-status count of the student's applications."""
    current_app.logger.info(
        'GET /student/%s/applications/summary route', studentID)

    query = '''
        SELECT a.status,
               COUNT(*) AS ApplicationCount
        FROM applications a
        JOIN appliesToApp ata ON a.applicationId = ata.applicationId
        WHERE ata.studentId = %s
        GROUP BY a.status
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (studentID,))
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Advisor viewing all their advisees' application statuses
@applications.route('/advisor/<int:advisorID>/students/applications',
                    methods=['GET'])
def get_advisor_student_applications(advisorID):
    """Return every application submitted by the advisor's advisees."""
    current_app.logger.info(
        'GET /advisor/%s/students/applications route', advisorID)

    # Parameterized instead of str.format() (SQL-injection vector).
    query = '''
        SELECT aa.advisorId,
               u.userId,
               u.firstName,
               u.lastName,
               a.applicationId,
               a.status AS applicationStatus,
               cp.title AS positionTitle,
               cp.deadline AS applicationDeadline,
               com.name AS companyName,
               a.dateApplied
        FROM advisor_advisee aa
        JOIN users u ON aa.studentId = u.userId
        JOIN appliesToApp ata ON u.userId = ata.studentId
        JOIN applications a ON ata.applicationId = a.applicationId
        JOIN coopPositions cp ON a.coopPositionId = cp.coopPositionId
        JOIN companyProfiles com ON cp.companyProfileId = com.companyProfileId
        LEFT JOIN workedAtPos wp ON u.userId = wp.studentId
            AND wp.coopPositionId = cp.coopPositionId
        WHERE aa.advisorId = %s
        ORDER BY u.lastName, u.firstName, a.dateApplied DESC
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (advisorID,))
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Employer views all applications of a posting
@applications.route('/applications/<int:coopPositionId>', methods=['GET'])
def get_applications(coopPositionId):
    """Return every application for one co-op position, newest first."""
    current_app.logger.info('GET /applications/%s', coopPositionId)

    query = '''
        SELECT a.dateTimeApplied, a.status, a.resume, a.gpa, a.coverLetter,
               a.coopPositionId, a.applicationId
        FROM applications a
        JOIN coopPositions cp ON a.coopPositionId = cp.coopPositionId
        WHERE a.coopPositionId = %s
        ORDER BY a.dateTimeApplied DESC;
    '''
    cursor = db.get_db().cursor(dictionary=True)
    cursor.execute(query, (coopPositionId,))
    theData = cursor.fetchall()

    return make_response(jsonify(theData), 200)


# Student applies to a position
@applications.route('/users/appliesToApp/applications', methods=['POST'])
def create_application():
    """Insert a new application row from the JSON request body."""
    # Log message corrected: this is the POST route, not 'GET /applications'.
    current_app.logger.info('POST /users/appliesToApp/applications')
    application_info = request.json
    datetime_applied = application_info['dateTimeApplied']
    status = application_info['status']
    resume = application_info['resume']
    gpa = application_info['gpa']
    cover_letter = application_info['coverLetter']
    coop_position_id = application_info['coopPositionId']
    application_id = application_info['applicationId']

    # Bug fix: the original used the invalid form
    # ``VALUES (column = %s, ...)``; INSERT takes an explicit column list
    # followed by a plain VALUES tuple.
    query = '''
        INSERT INTO applications
            (dateTimeApplied, status, resume, gpa, coverLetter,
             coopPositionId, applicationId)
        VALUES (%s, %s, %s, %s, %s, %s, %s);
    '''
    data = (datetime_applied, status, resume, gpa, cover_letter,
            coop_position_id, application_id)

    cursor = db.get_db().cursor()
    cursor.execute(query, data)
    db.get_db().commit()
    return 'application submitted!'
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

companyProfiles = Blueprint('companyProfiles', __name__)


# Student/Advisor views a company profile
@companyProfiles.route('/companyProfiles/<int:companyProfileId>',
                       methods=['GET'])
def get_company_profile(companyProfileId):
    """Return the single company profile matching companyProfileId."""
    query = '''
        SELECT *
        FROM companyProfiles
        WHERE companyProfileId = %s
    '''

    cursor = db.get_db().cursor()
    # Bug fix: the original called .format() on a query containing %s and
    # then executed it with no parameters; bind the id properly instead.
    cursor.execute(query, (companyProfileId,))
    # Zip column names onto each row so the response is a list of dicts
    # even with a plain (non-dictionary) cursor.
    columns = [col[0] for col in cursor.description]
    theData = [dict(zip(columns, row)) for row in cursor.fetchall()]

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Advisor views all company profiles
@companyProfiles.route('/companyProfiles', methods=['GET'])
def get_all_company_profiles():
    """Return every company profile, ordered by name."""
    query = '''
        SELECT companyProfileId, name, bio, industry, websiteLink
        FROM companyProfiles
        ORDER BY name
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    columns = [col[0] for col in cursor.description]
    theData = [dict(zip(columns, row)) for row in cursor.fetchall()]

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

coopPositions = Blueprint('coopPositions', __name__)


# Student views a co-op position
@coopPositions.route('/positions', methods=['GET'])
def get_position_info():
    """Return every co-op position row."""
    current_app.logger.info('GET /positions route')
    query = '''
        SELECT cp.*
        FROM coopPositions cp
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Student/Advisor views the average pay for each industry
@coopPositions.route('/industryAveragePay', methods=['GET'])
def get_industry_average_pay():
    """Return the average hourly pay per industry."""
    current_app.logger.info('GET /industryAveragePay route')

    query = '''
        SELECT cp.industry, AVG(cp.hourlyPay) AS industryAvgHourlyPay
        FROM coopPositions cp
        GROUP BY cp.industry;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Student views positions with desired skills that match their skills
@coopPositions.route('/<int:studentID>/desiredSkills', methods=['GET'])
def get_desired_skills(studentID):
    """List positions whose desired skills overlap the student's skills."""
    current_app.logger.info('GET /desiredSkills route')

    # NOTE(review): `cp.desiredGPA <= u.grade` compares a GPA column with
    # a column named `grade` -- confirm `users.grade` really stores GPA.
    query = '''
        SELECT cp.coopPositionId,
               cp.title,
               cp.location,
               cp.description
        FROM coopPositions cp
        LEFT JOIN viewsPos vp ON cp.coopPositionId = vp.coopPositionId
        JOIN users u ON u.userId = %s
        WHERE (vp.preference IS NULL OR vp.preference = TRUE)
          AND cp.desiredSkillsId IN (SELECT skillId
                                     FROM skillDetails
                                     WHERE studentId = %s)
          AND (cp.desiredGPA IS NULL OR cp.desiredGPA <= u.grade)
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (studentID, studentID))
    theData = cursor.fetchall()
    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Student views positions with required skills that match their skills
@coopPositions.route('/<int:studentID>/requiredSkills', methods=['GET'])
def get_required_skills(studentID):
    """List positions whose required skills overlap the student's skills."""
    current_app.logger.info('GET /requiredSkills route')

    query = '''
        SELECT cp.coopPositionId,
               cp.title,
               cp.location,
               cp.description
        FROM coopPositions cp
        LEFT JOIN viewsPos vp ON cp.coopPositionId = vp.coopPositionId
        JOIN users u ON u.userId = %s
        WHERE (vp.preference IS NULL OR vp.preference = TRUE)
          AND cp.requiredSkillsId IN (SELECT skillId
                                      FROM skillDetails
                                      WHERE studentId = %s)
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (studentID, studentID))
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Employer posts co-op position
@coopPositions.route('/createsPos/coopPosition', methods=['POST'])
def create_position():
    """Insert a new co-op position from the JSON request body."""
    current_app.logger.info('POST /createsPos/coopPosition')
    pos_info = request.json
    # Bug fix: the original ended each assignment with a trailing comma,
    # turning every value into a 1-tuple before it reached the INSERT.
    coop_position_id = pos_info['coopPositionId']
    title = pos_info['title']
    location = pos_info['location']
    description = pos_info['description']
    hourly_pay = pos_info['hourlyPay']
    required_skills = pos_info.get('requiredSkillsId')
    desired_skills = pos_info.get('desiredSkillsId')
    desired_gpa = pos_info.get('desiredGPA')
    deadline = pos_info.get('deadline')
    start_date = pos_info['startDate']
    end_date = pos_info['endDate']
    flag = pos_info.get('flagged', False)
    industry = pos_info['industry']

    # NOTE(review): this INSERT writes a `flagged` column while every
    # other query in this file reads/updates `flag` -- confirm the real
    # column name against the schema.
    query = '''
        INSERT INTO coopPositions
            (coopPositionId, title, location, description, hourlyPay,
             requiredSkillsId, desiredSkillsId, desiredGPA, deadline,
             startDate, endDate, flagged, industry)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
    '''
    data = (coop_position_id, title, location, description, hourly_pay,
            required_skills, desired_skills, desired_gpa, deadline,
            start_date, end_date, flag, industry)

    cursor = db.get_db().cursor()
    cursor.execute(query, data)
    db.get_db().commit()
    return make_response(jsonify({"message": "Position created!"}), 201)


# Admin reviews positions before they go live
@coopPositions.route('/pending', methods=['GET'])
def get_pending_positions():
    """List positions still awaiting admin approval."""
    current_app.logger.info('GET /pending route')

    # Bug fix: in this file unapproved rows have flag = TRUE (the approve
    # route clears flag where flag = TRUE, and the delete route only
    # removes rows with flag = TRUE).  The original filtered
    # `cp.flag = FALSE`, which listed already-approved positions instead.
    query = '''
        SELECT
            cp.coopPositionId,
            cp.title,
            cp.location,
            cp.description,
            cp.hourlyPay,
            cp.deadline,
            cp.startDate,
            cp.endDate,
            cp.industry,
            com.name AS companyName
        FROM coopPositions cp
        LEFT JOIN createsPos cr ON cr.coopPositionId = cp.coopPositionId
        LEFT JOIN users u ON u.userId = cr.employerId
        LEFT JOIN companyProfiles com
            ON com.companyProfileId = u.companyProfileId
        WHERE cp.flag = TRUE
        ORDER BY cp.deadline IS NULL, cp.deadline ASC, cp.coopPositionId DESC
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Admin views number of co-ops posted by each employer
@coopPositions.route('/employerJobCounts', methods=['GET'])
def get_employer_job_counts():
    """Count postings per employer (employers with zero postings included)."""
    current_app.logger.info('GET /employerJobCounts route')

    query = '''
        SELECT
            u.userId AS employerId,
            u.firstName,
            u.lastName,
            com.name AS companyName,
            COUNT(cr.coopPositionId) AS numJobs
        FROM users u
        JOIN companyProfiles com
            ON com.companyProfileId = u.companyProfileId
        LEFT JOIN createsPos cr
            ON cr.employerId = u.userId
        WHERE u.companyProfileId IS NOT NULL
        GROUP BY u.userId, u.firstName, u.lastName, com.name
        ORDER BY numJobs DESC, u.lastName ASC, u.firstName ASC;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response


# Admin approves a co-op position
@coopPositions.route('/<int:pos_id>/approve', methods=['PUT'])
def approve_position(pos_id):
    """Clear the pending flag on one position; 409 if nothing to approve."""
    current_app.logger.info('PUT /%s/approve route', pos_id)

    query = '''
        UPDATE coopPositions
        SET flag = FALSE
        WHERE coopPositionId = %s AND flag = TRUE
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (pos_id,))
    # rowcount == 0 means no pending row matched; nothing was changed, so
    # returning without commit is safe.
    if cursor.rowcount == 0:
        the_response = make_response(
            jsonify({"ok": False, "error": "not found or already approved"}))
        the_response.status_code = 409
        return the_response

    db.get_db().commit()
    the_response = make_response(
        jsonify({"ok": True, "positionId": pos_id, "status": "approved"}))
    the_response.status_code = 200
    return the_response


# Admin deletes an unapproved/invalid posting
@coopPositions.route('/<int:pos_id>', methods=['DELETE'])
def delete_unapproved_position(pos_id):
    """Delete a still-unapproved (flag = TRUE) position; 409 otherwise."""
    current_app.logger.info('DELETE /%s route', pos_id)

    query = '''
        DELETE FROM coopPositions
        WHERE coopPositionId = %s
          AND flag = TRUE
    '''

    cursor = db.get_db().cursor()
    try:
        cursor.execute(query, (pos_id,))
        if cursor.rowcount == 0:
            the_response = make_response(jsonify({
                "ok": False,
                "error": "not found or already approved"
            }))
            the_response.status_code = 409
            return the_response

        db.get_db().commit()
        the_response = make_response(
            jsonify({"ok": True, "positionId": pos_id, "deleted": True}))
        the_response.status_code = 200
        return the_response

    except Exception:
        # NOTE(review): broad except deliberately maps FK-constraint
        # failures to 409; catching the connector's IntegrityError class
        # would be tighter, but the driver type isn't visible here.
        the_response = make_response(jsonify({
            "ok": False,
            "error": "cannot delete due to related records"
        }))
        the_response.status_code = 409
        return the_response


# Admin flags a position
@coopPositions.route('/<int:pos_id>/flag/<int:value>', methods=['PUT'])
def set_position_flag(pos_id, value):
    """Set the position's flag to the given value (0 or 1)."""
    current_app.logger.info('PUT /%s/flag/%s route', pos_id, value)

    query = '''
        UPDATE coopPositions
        SET flag = %s
        WHERE coopPositionId = %s;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (value, pos_id))
    db.get_db().commit()

    the_response = make_response(jsonify({'message': 'flag updated!'}))
    the_response.status_code = 200
    return the_response


# Admin removes a flag from a position
@coopPositions.route('/<int:pos_id>/unflag', methods=['PUT'])
def unflag_position(pos_id):
    """Clear the position's flag unconditionally."""
    current_app.logger.info('PUT /%s/unflag route', pos_id)

    query = '''
        UPDATE coopPositions
        SET flag = FALSE
        WHERE coopPositionId = %s;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (pos_id,))
    db.get_db().commit()

    the_response = make_response(jsonify({'message': 'flag removed!'}))
    the_response.status_code = 200
    return the_response


# All positions, soonest deadline first
@coopPositions.route('/allPositions', methods=['GET'])
def get_all_positions():
    """Return a summary of every position, ordered by deadline."""
    current_app.logger.info('GET /allPositions route')
    query = '''
        SELECT
            coopPositionId,
            title,
            location,
            description,
            hourlyPay,
            desiredGPA,
            deadline,
            startDate,
            endDate,
            industry
        FROM coopPositions
        ORDER BY deadline ASC, coopPositionId DESC
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()
    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    return the_response
-customers = Blueprint('customers', __name__) - - -#------------------------------------------------------------ -# Get all customers from the system -@customers.route('/customers', methods=['GET']) -def get_customers(): - - cursor = db.get_db().cursor() - cursor.execute('''SELECT id, company, last_name, - first_name, job_title, business_phone FROM customers - ''') - - theData = cursor.fetchall() - - the_response = make_response(jsonify(theData)) - the_response.status_code = 200 - return the_response - -#------------------------------------------------------------ -# Update customer info for customer with particular userID -# Notice the manner of constructing the query. -@customers.route('/customers', methods=['PUT']) -def update_customer(): - current_app.logger.info('PUT /customers route') - cust_info = request.json - cust_id = cust_info['id'] - first = cust_info['first_name'] - last = cust_info['last_name'] - company = cust_info['company'] - - query = 'UPDATE customers SET first_name = %s, last_name = %s, company = %s where id = %s' - data = (first, last, company, cust_id) - cursor = db.get_db().cursor() - r = cursor.execute(query, data) - db.get_db().commit() - return 'customer updated!' - -#------------------------------------------------------------ -# Get customer detail for customer with particular userID -# Notice the manner of constructing the query. 
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

demographics = Blueprint('demographics', __name__)


# DEI by employer
@demographics.route('/demographics/employers/gender', methods=['GET'])
def dei_employers_gender():
    """Count applications per company, broken down by applicant gender."""
    current_app.logger.info('GET /demographics/employers/gender route')
    # NOTE(review): the join uses d.demographicId = us.userId, i.e. the
    # demographics primary key doubles as the user id -- confirm against
    # the schema.
    query = '''
        SELECT
            com.name AS companyName,
            d.gender,
            COUNT(*) AS applicationCount
        FROM applications a
        JOIN appliesToApp ata ON ata.applicationId = a.applicationId
        JOIN users us ON us.userId = ata.studentId
        LEFT JOIN demographics d ON d.demographicId = us.userId
        JOIN coopPositions cp ON cp.coopPositionId = a.coopPositionId
        JOIN createsPos cr ON cr.coopPositionId = cp.coopPositionId
        JOIN users ue ON ue.userId = cr.employerId
        JOIN companyProfiles com ON com.companyProfileId = ue.companyProfileId
        GROUP BY com.name, d.gender
        ORDER BY com.name, d.gender;
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()
    # One statement per line (the original chained these with ';').
    resp = make_response(jsonify(theData))
    resp.status_code = 200
    return resp


# DEI by posting
@demographics.route('/demographics/positions/gender', methods=['GET'])
def dei_positions_gender():
    """Count applications per posting, broken down by applicant gender."""
    current_app.logger.info('GET /demographics/positions/gender route')
    query = '''
        SELECT
            cp.coopPositionId,
            cp.title,
            com.name AS companyName,
            d.gender,
            COUNT(*) AS applicationCount
        FROM applications a
        JOIN appliesToApp ata ON ata.applicationId = a.applicationId
        JOIN users us ON us.userId = ata.studentId
        LEFT JOIN demographics d ON d.demographicId = us.userId
        JOIN coopPositions cp ON cp.coopPositionId = a.coopPositionId
        JOIN createsPos cr ON cr.coopPositionId = cp.coopPositionId
        JOIN users ue ON ue.userId = cr.employerId
        JOIN companyProfiles com ON com.companyProfileId = ue.companyProfileId
        GROUP BY cp.coopPositionId, cp.title, com.name, d.gender
        ORDER BY cp.title, d.gender;
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()
    resp = make_response(jsonify(theData))
    resp.status_code = 200
    return resp
-""" -from backend.db_connection import db -import numpy as np -import logging - - -def train(): - """ - You could have a function that performs training from scratch as well as testing (see below). - It could be activated from a route for an "administrator role" or something similar. - """ - return 'Training the model' - -def test(): - return 'Testing the model' - -def predict(var01, var02): - """ - Retreives model parameters from the database and uses them for real-time prediction - """ - # get a database cursor - cursor = db.get_db().cursor() - # get the model params from the database - query = 'SELECT beta_vals FROM model1_params ORDER BY sequence_number DESC LIMIT 1' - cursor.execute(query) - return_val = cursor.fetchone() - - params = return_val['beta_vals'] - logging.info(f'params = {params}') - logging.info(f'params datatype = {type(params)}') - - # turn the values from the database into a numpy array - params_array = np.array(list(map(float, params[1:-1].split(',')))) - logging.info(f'params array = {params_array}') - logging.info(f'params_array datatype = {type(params_array)}') - - # turn the variables sent from the UI into a numpy array - input_array = np.array([1.0, float(var01), float(var02)]) - - # calculate the dot product (since this is a fake regression) - prediction = np.dot(params_array, input_array) - - return prediction - diff --git a/api/backend/products/products_routes.py b/api/backend/products/products_routes.py deleted file mode 100644 index a3e596d0d3..0000000000 --- a/api/backend/products/products_routes.py +++ /dev/null @@ -1,208 +0,0 @@ -######################################################## -# Sample customers blueprint of endpoints -# Remove this file if you are not using it in your project -######################################################## - -from flask import Blueprint -from flask import request -from flask import jsonify -from flask import make_response -from flask import current_app -from backend.db_connection import db - 
-#------------------------------------------------------------ -# Create a new Blueprint object, which is a collection of -# routes. -products = Blueprint('products', __name__) - -#------------------------------------------------------------ -# Get all the products from the database, package them up, -# and return them to the client -@products.route('/products', methods=['GET']) -def get_products(): - query = ''' - SELECT id, - product_code, - product_name, - list_price, - category - FROM products - ''' - - # get a cursor object from the database - cursor = db.get_db().cursor() - - # use cursor to query the database for a list of products - cursor.execute(query) - - # fetch all the data from the cursor - # The cursor will return the data as a - # Python Dictionary - theData = cursor.fetchall() - - # Create a HTTP Response object and add results of the query to it - # after "jasonify"-ing it. - response = make_response(jsonify(theData)) - # set the proper HTTP Status code of 200 (meaning all good) - response.status_code = 200 - # send the response back to the client - return response - -# ------------------------------------------------------------ -# get product information about a specific product -# notice that the route takes and then you see id -# as a parameter to the function. This is one way to send -# parameterized information into the route handler. -@products.route('/product/', methods=['GET']) -def get_product_detail (id): - - query = f'''SELECT id, - product_name, - description, - list_price, - category - FROM products - WHERE id = {str(id)} - ''' - - # logging the query for debugging purposes. - # The output will appear in the Docker logs output - # This line has nothing to do with actually executing the query... - # It is only for debugging purposes. 
- current_app.logger.info(f'GET /product/ query={query}') - - # get the database connection, execute the query, and - # fetch the results as a Python Dictionary - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - # Another example of logging for debugging purposes. - # You can see if the data you're getting back is what you expect. - current_app.logger.info(f'GET /product/ Result of query = {theData}') - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# Get the top 5 most expensive products from the database -@products.route('/mostExpensive') -def get_most_pop_products(): - - query = ''' - SELECT product_code, - product_name, - list_price, - reorder_level - FROM products - ORDER BY list_price DESC - LIMIT 5 - ''' - - # Same process as handler above - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# Route to get the 10 most expensive items from the -# database. -@products.route('/tenMostExpensive', methods=['GET']) -def get_10_most_expensive_products(): - - query = ''' - SELECT product_code, - product_name, - list_price, - reorder_level - FROM products - ORDER BY list_price DESC - LIMIT 10 - ''' - - # Same process as above - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - - -# ------------------------------------------------------------ -# This is a POST route to add a new product. -# Remember, we are using POST routes to create new entries -# in the database. 
-@products.route('/product', methods=['POST']) -def add_new_product(): - - # In a POST request, there is a - # collecting data from the request object - the_data = request.json - current_app.logger.info(the_data) - - #extracting the variable - name = the_data['product_name'] - description = the_data['product_description'] - price = the_data['product_price'] - category = the_data['product_category'] - - query = f''' - INSERT INTO products (product_name, - description, - category, - list_price) - VALUES ('{name}', '{description}', '{category}', {str(price)}) - ''' - # TODO: Make sure the version of the query above works properly - # Constructing the query - # query = 'insert into products (product_name, description, category, list_price) values ("' - # query += name + '", "' - # query += description + '", "' - # query += category + '", ' - # query += str(price) + ')' - current_app.logger.info(query) - - # executing and committing the insert statement - cursor = db.get_db().cursor() - cursor.execute(query) - db.get_db().commit() - - response = make_response("Successfully added product") - response.status_code = 200 - return response - -# ------------------------------------------------------------ -### Get all product categories -@products.route('/categories', methods = ['GET']) -def get_all_categories(): - query = ''' - SELECT DISTINCT category AS label, category as value - FROM products - WHERE category IS NOT NULL - ORDER BY category - ''' - - cursor = db.get_db().cursor() - cursor.execute(query) - theData = cursor.fetchall() - - response = make_response(jsonify(theData)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# This is a stubbed route to update a product in the catalog -# The SQL query would be an UPDATE. 
-@products.route('/product', methods = ['PUT']) -def update_product(): - product_info = request.json - current_app.logger.info(product_info) - - return "Success" \ No newline at end of file diff --git a/api/backend/rest_entry.py b/api/backend/rest_entry.py index d8d78502d9..ec24b7a445 100644 --- a/api/backend/rest_entry.py +++ b/api/backend/rest_entry.py @@ -1,9 +1,15 @@ from flask import Flask from backend.db_connection import db -from backend.customers.customer_routes import customers -from backend.products.products_routes import products -from backend.simple.simple_routes import simple_routes +from backend.users.users_routes import users +from backend.coopPositions.coopPositions_routes import coopPositions +from backend.companyProfiles.companyProfiles_routes import companyProfiles +from backend.workedatpos.workedatpos_routes import workedatpos +from backend.viewsPos.viewsPos_routes import views_position +from backend.applications.applications_routes import applications +from backend.advisoradvisee.advisoradvisee_routes import advisoradvisee +from backend.demographics.demographics_routes import demographics + import os from dotenv import load_dotenv @@ -20,28 +26,30 @@ def create_app(): # secret key that will be used for securely signing the session # cookie and can be used for any other security related needs by # extensions or your application - # app.config['SECRET_KEY'] = 'someCrazyS3cR3T!Key.!' app.config['SECRET_KEY'] = os.getenv('SECRET_KEY') - # # these are for the DB object to be able to connect to MySQL. - # app.config['MYSQL_DATABASE_USER'] = 'root' + # these are for the DB object to be able to connect to MySQL. 
app.config['MYSQL_DATABASE_USER'] = os.getenv('DB_USER').strip() - app.config['MYSQL_DATABASE_PASSWORD'] = os.getenv('MYSQL_ROOT_PASSWORD').strip() + app.config['MYSQL_DATABASE_PASSWORD'] = os.getenv('MYSQL_ROOT_PASSWORD') app.config['MYSQL_DATABASE_HOST'] = os.getenv('DB_HOST').strip() app.config['MYSQL_DATABASE_PORT'] = int(os.getenv('DB_PORT').strip()) - app.config['MYSQL_DATABASE_DB'] = os.getenv('DB_NAME').strip() # Change this to your DB name + app.config['MYSQL_DATABASE_DB'] = os.getenv('DB_NAME').strip() # Initialize the database object with the settings above. app.logger.info('current_app(): starting the database connection') db.init_app(app) - # Register the routes from each Blueprint with the app object # and give a url prefix to each app.logger.info('current_app(): registering blueprints with Flask app object.') - app.register_blueprint(simple_routes) - app.register_blueprint(customers, url_prefix='/c') - app.register_blueprint(products, url_prefix='/p') + app.register_blueprint(users) + app.register_blueprint(coopPositions) + app.register_blueprint(companyProfiles) + app.register_blueprint(workedatpos) + app.register_blueprint(views_position) + app.register_blueprint(applications) + app.register_blueprint(advisoradvisee) + app.register_blueprint(demographics) # Don't forget to return the app object return app diff --git a/api/backend/simple/playlist.py b/api/backend/simple/playlist.py deleted file mode 100644 index a9e7a9ef03..0000000000 --- a/api/backend/simple/playlist.py +++ /dev/null @@ -1,129 +0,0 @@ -# ------------------------------------------------------------ -# Sample data for testing generated by ChatGPT -# ------------------------------------------------------------ - -sample_playlist_data = { - "playlist": { - "id": "37i9dQZF1DXcBWIGoYBM5M", - "name": "Chill Hits", - "description": "Relax and unwind with the latest chill hits.", - "owner": { - "id": "spotify_user_123", - "display_name": "Spotify User" - }, - "tracks": { - "items": [ - { - 
"track": { - "id": "3n3Ppam7vgaVa1iaRUc9Lp", - "name": "Lose Yourself", - "artists": [ - { - "id": "1dfeR4HaWDbWqFHLkxsg1d", - "name": "Eminem" - } - ], - "album": { - "id": "1ATL5GLyefJaxhQzSPVrLX", - "name": "8 Mile" - }, - "duration_ms": 326000, - "track_number": 1, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/lose-yourself.mp3", - "uri": "spotify:track:3n3Ppam7vgaVa1iaRUc9Lp" - } - }, - { - "track": { - "id": "7ouMYWpwJ422jRcDASZB7P", - "name": "Blinding Lights", - "artists": [ - { - "id": "0fW8E0XdT6aG9aFh6jGpYo", - "name": "The Weeknd" - } - ], - "album": { - "id": "1ATL5GLyefJaxhQzSPVrLX", - "name": "After Hours" - }, - "duration_ms": 200040, - "track_number": 9, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/blinding-lights.mp3", - "uri": "spotify:track:7ouMYWpwJ422jRcDASZB7P" - } - }, - { - "track": { - "id": "4uLU6hMCjMI75M1A2tKUQC", - "name": "Shape of You", - "artists": [ - { - "id": "6eUKZXaKkcviH0Ku9w2n3V", - "name": "Ed Sheeran" - } - ], - "album": { - "id": "3fMbdgg4jU18AjLCKBhRSm", - "name": "Divide" - }, - "duration_ms": 233713, - "track_number": 4, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/shape-of-you.mp3", - "uri": "spotify:track:4uLU6hMCjMI75M1A2tKUQC" - } - }, - { - "track": { - "id": "0VjIjW4GlUZAMYd2vXMi3b", - "name": "Levitating", - "artists": [ - { - "id": "4tZwfgrHOc3mvqYlEYSvVi", - "name": "Dua Lipa" - } - ], - "album": { - "id": "7dGJo4pcD2V6oG8kP0tJRR", - "name": "Future Nostalgia" - }, - "duration_ms": 203693, - "track_number": 5, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/levitating.mp3", - "uri": "spotify:track:0VjIjW4GlUZAMYd2vXMi3b" - } - }, - { - "track": { - "id": "6habFhsOp2NvshLv26DqMb", - "name": "Sunflower", - "artists": [ - { - "id": "1dfeR4HaWDbWqFHLkxsg1d", - "name": "Post Malone" - }, - { - "id": "0C8ZW7ezQVs4URX5aX7Kqx", - "name": "Swae Lee" - } - ], - "album": { - "id": "6k3hyp4efgfHP5GMVd3Agw", - "name": "Spider-Man: Into 
the Spider-Verse (Soundtrack)" - }, - "duration_ms": 158000, - "track_number": 3, - "disc_number": 1, - "preview_url": "https://p.scdn.co/mp3-preview/sunflower.mp3", - "uri": "spotify:track:6habFhsOp2NvshLv26DqMb" - } - } - ] - }, - "uri": "spotify:playlist:37i9dQZF1DXcBWIGoYBM5M" - } -} \ No newline at end of file diff --git a/api/backend/simple/simple_routes.py b/api/backend/simple/simple_routes.py deleted file mode 100644 index 8685fbac76..0000000000 --- a/api/backend/simple/simple_routes.py +++ /dev/null @@ -1,48 +0,0 @@ -from flask import Blueprint, request, jsonify, make_response, current_app, redirect, url_for -import json -from backend.db_connection import db -from backend.simple.playlist import sample_playlist_data - -# This blueprint handles some basic routes that you can use for testing -simple_routes = Blueprint('simple_routes', __name__) - - -# ------------------------------------------------------------ -# / is the most basic route -# Once the api container is started, in a browser, go to -# localhost:4000/playlist -@simple_routes.route('/') -def welcome(): - current_app.logger.info('GET / handler') - welcome_message = '

Welcome to the CS 3200 Project Template REST API' - response = make_response(welcome_message) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -# /playlist returns the sample playlist data contained in playlist.py -# (imported above) -@simple_routes.route('/playlist') -def get_playlist_data(): - current_app.logger.info('GET /playlist handler') - response = make_response(jsonify(sample_playlist_data)) - response.status_code = 200 - return response - -# ------------------------------------------------------------ -@simple_routes.route('/niceMesage', methods = ['GET']) -def affirmation(): - message = ''' -

Think about it...

-
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db
import logging

logger = logging.getLogger(__name__)

users = Blueprint('users', __name__)


# Student profile lookup, joined with the demographics table.
@users.route('/users/<userID>', methods=['GET'])
def get_user(userID):
    """Return the user row plus demographic columns for one user id.

    NOTE(review): demographics is joined on u.userId = d.demographicId,
    i.e. the demographic id mirrors the user id — this convention is used
    throughout the file; confirm against the schema.
    """
    query = '''
        SELECT u.*, d.gender, d.race, d.nationality, d.sexuality, d.disability
        FROM users u
        LEFT JOIN demographics d ON u.userId = d.demographicId
        WHERE u.userId = %s
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (userID,))
    rows = cursor.fetchall()

    the_response = make_response(jsonify(rows))
    the_response.status_code = 200
    return the_response


# A student's skills together with their proficiency levels.
@users.route('/users/<userID>/skills', methods=['GET'])
def get_user_skills(userID):
    """Return every skill linked to the student, ordered by category then name."""
    current_app.logger.info(f'GET /users/{userID}/skills route')

    query = '''
        SELECT s.skillId, s.name, s.category, sd.proficiencyLevel
        FROM skills s
        JOIN skillDetails sd ON s.skillId = sd.skillId
        WHERE sd.studentId = %s
        ORDER BY s.category, s.name
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query, (userID,))
    rows = cursor.fetchall()

    the_response = make_response(jsonify(rows))
    the_response.status_code = 200
    return the_response
# ---------------------------------------------------------------------------
# Shared helper for the read-only routes below: run a parameterized SELECT
# and wrap the fetched rows in a 200 JSON response.
# ---------------------------------------------------------------------------
def _select_response(query, params=None):
    cursor = db.get_db().cursor()
    cursor.execute(query, params or ())
    the_response = make_response(jsonify(cursor.fetchall()))
    the_response.status_code = 200
    return the_response


# Get recent applications for dashboard (limit 5)
@users.route('/users/<userID>/recent-applications', methods=['GET'])
def get_user_recent_applications(userID):
    """Return the student's five most recent applications with position/company info."""
    current_app.logger.info(f'GET /users/{userID}/recent-applications route')

    query = '''
        SELECT a.applicationId,
               a.status,
               a.dateTimeApplied,
               a.gpa,
               cp.title AS positionTitle,
               cp.location,
               cp.hourlyPay,
               cp.deadline,
               com.name AS companyName
        FROM applications a
        JOIN appliesToApp ata ON a.applicationId = ata.applicationId
        JOIN coopPositions cp ON a.coopPositionId = cp.coopPositionId
        JOIN createsPos crp ON cp.coopPositionId = crp.coopPositionId
        JOIN users emp ON crp.employerId = emp.userId
        JOIN companyProfiles com ON emp.companyProfileId = com.companyProfileId
        WHERE ata.studentId = %s
        ORDER BY a.dateTimeApplied DESC
        LIMIT 5
    '''
    return _select_response(query, (userID,))


# Get all available skills
@users.route('/skills', methods=['GET'])
def get_all_skills():
    """Return the full skill catalog, ordered by category then name."""
    query = '''
        SELECT skillId, name, category
        FROM skills
        ORDER BY category, name
    '''
    return _select_response(query)


# Update user skills (modify proficiency levels and remove skills)
@users.route('/users/<userID>/skills', methods=['PUT'])
def update_user_skills(userID):
    """Apply proficiency updates and deletions to a student's skill list.

    Body: {"updated_skills": [{"skillId", "proficiencyLevel"}, ...],
           "removed_skills": [skillId, ...]}
    """
    try:
        data = request.get_json()
        updated_skills = data.get('updated_skills', [])
        removed_skills = data.get('removed_skills', [])

        cursor = db.get_db().cursor()

        # Update existing skills' proficiency levels.
        update_query = '''
            UPDATE skillDetails
            SET proficiencyLevel = %s
            WHERE studentId = %s AND skillId = %s
        '''
        for skill in updated_skills:
            cursor.execute(update_query,
                           (skill['proficiencyLevel'], userID, skill['skillId']))

        # Remove skills marked for deletion in one statement.
        if removed_skills:
            placeholders = ','.join(['%s'] * len(removed_skills))
            delete_query = f'''
                DELETE FROM skillDetails
                WHERE studentId = %s AND skillId IN ({placeholders})
            '''
            cursor.execute(delete_query, [userID] + removed_skills)

        db.get_db().commit()

        the_response = make_response(jsonify({"message": "Skills updated successfully"}))
        the_response.status_code = 200
        return the_response

    except Exception as e:
        logger.error(f"Error updating user skills: {e}")
        the_response = make_response(jsonify({"error": "Failed to update skills"}))
        the_response.status_code = 500
        return the_response


# Add new skills to user profile
@users.route('/users/<userID>/skills', methods=['POST'])
def add_user_skills(userID):
    """Insert new (skillId, studentId, proficiencyLevel) rows for the student."""
    try:
        data = request.get_json()
        new_skills = data.get('skills', [])

        if not new_skills:
            the_response = make_response(jsonify({"error": "No skills provided"}))
            the_response.status_code = 400
            return the_response

        cursor = db.get_db().cursor()
        insert_query = '''
            INSERT INTO skillDetails (skillId, studentId, proficiencyLevel)
            VALUES (%s, %s, %s)
        '''
        for skill in new_skills:
            cursor.execute(insert_query,
                           (skill['skillId'], userID, skill['proficiencyLevel']))

        db.get_db().commit()

        the_response = make_response(
            jsonify({"message": f"Added {len(new_skills)} skills successfully"}))
        the_response.status_code = 200
        return the_response

    except Exception as e:
        logger.error(f"Error adding user skills: {e}")
        the_response = make_response(jsonify({"error": "Failed to add skills"}))
        the_response.status_code = 500
        return the_response


# Get advisor's assigned students
@users.route('/advisors/<advisorID>/students', methods=['GET'])
def get_advisor_students(advisorID):
    """Return the advisor's advisees with demographics and flag status."""
    query = '''
        SELECT u.userId, u.firstName, u.lastName, u.email, u.phone,
               u.major, u.minor, u.college, u.gradYear, u.grade,
               d.gender, d.race, d.nationality, d.sexuality, d.disability,
               aa.flag as flagged
        FROM users u
        LEFT JOIN demographics d ON u.userId = d.demographicId
        JOIN advisor_advisee aa ON u.userId = aa.studentId
        WHERE aa.advisorId = %s
        ORDER BY u.lastName, u.firstName
    '''
    return _select_response(query, (advisorID,))


# Update student flag status for advisor
@users.route('/advisors/<advisorID>/students/<studentID>/flag', methods=['PUT'])
def update_student_flag(advisorID, studentID):
    """Set or clear the advisor's attention flag on one advisee."""
    try:
        data = request.get_json()
        flagged = data.get('flagged', False)

        query = '''
            UPDATE advisor_advisee
            SET flag = %s
            WHERE advisorId = %s AND studentId = %s
        '''
        cursor = db.get_db().cursor()
        cursor.execute(query, (flagged, advisorID, studentID))
        db.get_db().commit()

        the_response = make_response(
            jsonify({"message": "Student flag updated successfully", "flagged": flagged}))
        the_response.status_code = 200
        return the_response

    except Exception as e:
        logger.error(f"Error updating student flag: {e}")
        the_response = make_response(jsonify({"error": "Failed to update student flag"}))
        the_response.status_code = 500
        return the_response


# Get placement analytics data for advisor
@users.route('/advisors/<advisorID>/analytics/placement-data', methods=['GET'])
def get_advisor_placement_analytics(advisorID):
    """Union of decided applications and completed co-ops for the advisor's students.

    The second leg has no per-row GPA, so it substitutes the student's
    average application GPA and a synthetic 'Completed' status.
    """
    try:
        query = '''
            SELECT
                u.firstName,
                u.lastName,
                u.gradYear,
                u.major,
                u.college,
                a.gpa,
                a.status,
                cp.title as positionTitle,
                cp.hourlyPay as salary,
                comp.name as companyName,
                cp.industry
            FROM users u
            JOIN advisor_advisee aa ON u.userId = aa.studentId
            JOIN appliesToApp ata ON u.userId = ata.studentId
            JOIN applications a ON ata.applicationId = a.applicationId
            JOIN coopPositions cp ON a.coopPositionId = cp.coopPositionId
            LEFT JOIN createsPos crp ON cp.coopPositionId = crp.coopPositionId
            LEFT JOIN users emp ON crp.employerId = emp.userId
            LEFT JOIN companyProfiles comp ON emp.companyProfileId = comp.companyProfileId
            WHERE aa.advisorId = %s
              AND a.status IN ('Accepted', 'Rejected')
              AND cp.hourlyPay IS NOT NULL
              AND a.gpa IS NOT NULL

            UNION ALL

            SELECT
                u.firstName,
                u.lastName,
                u.gradYear,
                u.major,
                u.college,
                avg_gpa.gpa as gpa,
                'Completed' as status,
                cp.title as positionTitle,
                cp.hourlyPay as salary,
                comp.name as companyName,
                cp.industry
            FROM users u
            JOIN advisor_advisee aa ON u.userId = aa.studentId
            JOIN workedAtPos wap ON u.userId = wap.studentId
            JOIN coopPositions cp ON wap.coopPositionId = cp.coopPositionId
            LEFT JOIN createsPos crp ON cp.coopPositionId = crp.coopPositionId
            LEFT JOIN users emp ON crp.employerId = emp.userId
            LEFT JOIN companyProfiles comp ON emp.companyProfileId = comp.companyProfileId
            LEFT JOIN (
                SELECT ata.studentId, AVG(a.gpa) as gpa
                FROM appliesToApp ata
                JOIN applications a ON ata.applicationId = a.applicationId
                WHERE a.gpa IS NOT NULL
                GROUP BY ata.studentId
            ) avg_gpa ON u.userId = avg_gpa.studentId
            WHERE aa.advisorId = %s
              AND cp.hourlyPay IS NOT NULL

            ORDER BY lastName, firstName
        '''
        return _select_response(query, (advisorID, advisorID))

    except Exception as e:
        logger.error(f"Error fetching placement analytics: {e}")
        the_response = make_response(jsonify({"error": "Failed to fetch placement analytics"}))
        the_response.status_code = 500
        return the_response


# Update advisor profile (separate from student profile updates)
@users.route('/advisors/<advisorID>/profile', methods=['PUT'])
def update_advisor_profile(advisorID):
    """Update an advisor's basic info (users) and demographics in one request."""
    try:
        current_app.logger.info(f'PUT /advisors/{advisorID}/profile route')
        advisor_info = request.json

        first_name = advisor_info.get('firstName')
        last_name = advisor_info.get('lastName')
        email = advisor_info.get('email')
        phone = advisor_info.get('phone')
        gender = advisor_info.get('gender')
        race = advisor_info.get('race')
        nationality = advisor_info.get('nationality')
        sexuality = advisor_info.get('sexuality')
        disability = advisor_info.get('disability')

        user_query = '''
            UPDATE users
            SET firstName = %s,
                lastName = %s,
                email = %s,
                phone = %s
            WHERE userId = %s
        '''
        demo_query = '''
            UPDATE demographics
            SET gender = %s,
                race = %s,
                nationality = %s,
                sexuality = %s,
                disability = %s
            WHERE demographicId = %s
        '''

        cursor = db.get_db().cursor()
        cursor.execute(user_query, (first_name, last_name, email, phone, advisorID))
        # demographicId mirrors userId throughout this module.
        cursor.execute(demo_query,
                       (gender, race, nationality, sexuality, disability, advisorID))
        db.get_db().commit()

        the_response = make_response(jsonify({"message": "Advisor profile updated successfully"}))
        the_response.status_code = 200
        return the_response

    except Exception as e:
        logger.error(f"Error updating advisor profile: {e}")
        the_response = make_response(jsonify({"error": "Failed to update advisor profile"}))
        the_response.status_code = 500
        return the_response


# Update student profiles to include additional info
@users.route('/users', methods=['PUT'])
def update_users():
    """Update a student's users row and demographics row in a single statement.

    All fields are required in the JSON body; a missing key raises KeyError.
    """
    current_app.logger.info('PUT /users route')
    user_info = request.json

    query = '''
        UPDATE users u
        JOIN demographics d ON u.userId = d.demographicId
        SET u.firstName = %s,
            u.lastName = %s,
            u.email = %s,
            u.phone = %s,
            u.major = %s,
            u.minor = %s,
            u.college = %s,
            u.gradYear = %s,
            u.grade = %s,
            d.gender = %s,
            d.race = %s,
            d.nationality = %s,
            d.sexuality = %s,
            d.disability = %s
        WHERE u.userId = %s;'''
    data = (user_info['firstName'], user_info['lastName'], user_info['email'],
            user_info['phone'], user_info['major'], user_info['minor'],
            user_info['college'], user_info['gradYear'], user_info['grade'],
            user_info['gender'], user_info['race'], user_info['nationality'],
            user_info['sexuality'], user_info['disability'], user_info['userId'])
    cursor = db.get_db().cursor()
    cursor.execute(query, data)
    db.get_db().commit()
    return 'user updated!'


# Employer views student profile for a given application.
# BUG FIX: the original read request.json on a GET, executed a JOIN with no
# ON clause, and passed a 12-value tuple to a query containing zero
# placeholders (an execute-time error). The application id now comes from
# the URL and the join follows the appliesToApp bridge table.
@users.route('/applications/appliesToApp/<applicationID>/users', methods=['GET'])
def employer_view_student(applicationID):
    current_app.logger.info('GET /applications/appliesToApp/<applicationID>/users')

    query = '''
        SELECT u.userId, u.firstName, u.lastName, u.email, u.major,
               u.minor, u.college, u.grade, u.gradYear, a.gpa,
               a.resume, a.coverLetter
        FROM users u
        JOIN appliesToApp ata ON u.userId = ata.studentId
        JOIN applications a ON ata.applicationId = a.applicationId
        WHERE a.applicationId = %s
    '''
    return _select_response(query, (applicationID,))


# Employer filters student profiles by up to three skills, grad year and major.
# BUG FIX: the original referenced undefined names skill1/skill2/skill3
# (a NameError on every call), read request.json on a GET, and registered
# the same route rule as employer_view_student. Filters now come from the
# query string: ?skill1=..&skill2=..&skill3=..&gradYear=..&major=..
@users.route('/applications/appliesToApp/users/filter', methods=['GET'])
def employee_filter_student():
    current_app.logger.info('GET /applications/appliesToApp/users/filter')

    skill1 = request.args.get('skill1')
    skill2 = request.args.get('skill2')
    skill3 = request.args.get('skill3')
    grad_year = request.args.get('gradYear')
    major = request.args.get('major')

    query = '''
        SELECT DISTINCT u.userId, u.firstName, u.lastName, u.gradYear, u.major
        FROM users u
        JOIN skillDetails sd ON u.userId = sd.studentId
        JOIN skills s ON sd.skillId = s.skillId
        WHERE (s.name = %s OR s.name = %s OR s.name = %s)
          AND u.gradYear = %s
          AND u.major = %s;
    '''
    # Plain cursor for consistency with the rest of this module (the
    # original asked for cursor(dictionary=True), a different driver API).
    cursor = db.get_db().cursor()
    cursor.execute(query, (skill1, skill2, skill3, grad_year, major))
    return make_response(jsonify(cursor.fetchall()), 200)


# Admin creates a user (student/employer/advisor)
@users.route('/users', methods=['POST'])
def create_user():
    """Insert a new users row; role-specific columns are optional (NULL-able)."""
    current_app.logger.info('POST /users route')

    b = request.json
    query = '''
        INSERT INTO users
            (userId, firstName, lastName, demographicId, email, phone,
             major, minor, college, gradYear, grade, companyProfileId, industry)
        VALUES
            (%s, %s, %s, %s, %s, %s,
             %s, %s, %s, %s, %s, %s, %s);
    '''
    data = (b['userId'], b['firstName'], b['lastName'], b.get('demographicId'),
            b['email'], b.get('phone'), b.get('major'), b.get('minor'),
            b.get('college'), b.get('gradYear'), b.get('grade'),
            b.get('companyProfileId'), b.get('industry'))

    cur = db.get_db().cursor()
    cur.execute(query, data)
    db.get_db().commit()
    return 'user created!', 201


# Admin deletes a user
@users.route('/users', methods=['DELETE'])
def delete_user():
    """Delete the user named by ?userId=<int>; 400 when the id is missing.

    Robustness fix: previously a missing/non-integer userId deleted nothing
    but still reported success.
    """
    current_app.logger.info('DELETE /users route')

    user_id = request.args.get('userId', type=int)
    if user_id is None:
        the_response = make_response(jsonify({'error': 'userId query parameter is required'}))
        the_response.status_code = 400
        return the_response

    query = '''
        DELETE FROM users
        WHERE userId = %s;
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query, (user_id,))
    db.get_db().commit()

    the_response = make_response(jsonify({'message': 'user deleted!'}))
    the_response.status_code = 200
    return the_response


# Employer creates a company profile
@users.route('/users/companyProfiles/create', methods=['POST'])
def createCompanyProfile():
    """Insert a new companyProfiles row.

    SECURITY FIX: the original interpolated request fields directly into the
    SQL string (injection risk); values are now bound as parameters.
    """
    the_data = request.json
    current_app.logger.info(the_data)

    query = '''
        INSERT INTO companyProfiles (name, bio, industry, websiteLink)
        VALUES (%s, %s, %s, %s)
    '''
    data = (the_data['company_name'], the_data['company_bio'],
            the_data['company_industry'], the_data['website_link'])

    cursor = db.get_db().cursor()
    cursor.execute(query, data)
    db.get_db().commit()

    response = make_response("Created company profile")
    response.status_code = 200
    return response


# Employer updates/edits company information
@users.route('/users/companyProfiles/create/<companyProfileId>', methods=['PUT'])
def updateCompanyProfile(companyProfileId):
    """Update a company profile identified by the URL parameter.

    BUG FIX: the original ignored companyProfileId from the URL and trusted a
    body-supplied 'id' instead, so the route parameter had no effect.
    """
    current_app.logger.info('PUT /users/companyProfiles/create/<companyProfileId> route')

    company_info = request.json
    query = '''
        UPDATE companyProfiles
        SET name = %s,
            bio = %s,
            industry = %s,
            websiteLink = %s
        WHERE companyProfileId = %s
    '''
    data = (company_info['name'], company_info['bio'], company_info['industry'],
            company_info['website_link'], companyProfileId)
    cursor = db.get_db().cursor()
    cursor.execute(query, data)
    db.get_db().commit()
    return 'Updated company profile!'
def _count_one(query, key):
    """Run a single-row COUNT query and return the named column's value."""
    cursor = db.get_db().cursor()
    cursor.execute(query)
    return cursor.fetchone()[key]


# Get count of students
@users.route('/users/count/students', methods=['GET'])
def count_students():
    """Students = users with a non-empty major and no company profile."""
    current_app.logger.info('GET /users/count/students route')
    try:
        query = '''
            SELECT COUNT(*) as student_count
            FROM users
            WHERE major IS NOT NULL AND major != '' AND companyProfileId IS NULL;
        '''
        the_response = make_response(jsonify({"student_count": _count_one(query, 'student_count')}))
        the_response.status_code = 200
        return the_response
    except Exception as e:
        current_app.logger.error(f"Error counting students: {e}")
        logger.error(f"Error counting students: {e}")
        the_response = make_response(jsonify({"error": f"Failed to count students: {str(e)}"}))
        the_response.status_code = 500
        return the_response


# Get count of advisors
@users.route('/users/count/advisors', methods=['GET'])
def count_advisors():
    """Advisors = distinct advisorIds appearing in advisor_advisee."""
    current_app.logger.info('GET /users/count/advisors route')
    try:
        query = '''
            SELECT COUNT(DISTINCT aa.advisorId) as advisor_count
            FROM advisor_advisee aa;
        '''
        the_response = make_response(jsonify({"advisor_count": _count_one(query, 'advisor_count')}))
        the_response.status_code = 200
        return the_response
    except Exception as e:
        current_app.logger.error(f"Error counting advisors: {e}")
        logger.error(f"Error counting advisors: {e}")
        the_response = make_response(jsonify({"error": f"Failed to count advisors: {str(e)}"}))
        the_response.status_code = 500
        return the_response


# Get count of employers
@users.route('/users/count/employers', methods=['GET'])
def count_employers():
    """Employers = users linked to a company profile."""
    current_app.logger.info('GET /users/count/employers route')
    try:
        query = '''
            SELECT COUNT(*) as employer_count
            FROM users
            WHERE companyProfileId IS NOT NULL;
        '''
        the_response = make_response(jsonify({"employer_count": _count_one(query, 'employer_count')}))
        the_response.status_code = 200
        return the_response
    except Exception as e:
        current_app.logger.error(f"Error counting employers: {e}")
        logger.error(f"Error counting employers: {e}")
        the_response = make_response(jsonify({"error": f"Failed to count employers: {str(e)}"}))
        the_response.status_code = 500
        return the_response


# Get all user counts in one request
@users.route('/users/count/all', methods=['GET'])
def count_all_users():
    """Return student/advisor/employer counts plus their total."""
    current_app.logger.info('GET /users/count/all route')
    try:
        student_count = _count_one('''
            SELECT COUNT(*) as student_count
            FROM users
            WHERE major IS NOT NULL AND major != '' AND companyProfileId IS NULL
        ''', 'student_count')
        advisor_count = _count_one('''
            SELECT COUNT(DISTINCT aa.advisorId) as advisor_count
            FROM advisor_advisee aa
        ''', 'advisor_count')
        employer_count = _count_one('''
            SELECT COUNT(*) as employer_count
            FROM users
            WHERE companyProfileId IS NOT NULL
        ''', 'employer_count')

        counts = {
            "student_count": student_count,
            "advisor_count": advisor_count,
            "employer_count": employer_count,
            "total_users": student_count + advisor_count + employer_count
        }
        the_response = make_response(jsonify(counts))
        the_response.status_code = 200
        return the_response
    except Exception as e:
        current_app.logger.error(f"Error counting all users: {e}")
        logger.error(f"Error counting all users: {e}")
        the_response = make_response(jsonify({"error": f"Failed to count users: {str(e)}"}))
        the_response.status_code = 500
        return the_response
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

# Blueprint for student position-preference (viewsPos) routes.
views_position = Blueprint('views_position', __name__)


# student flags positions they like/do not like
@views_position.route('/position', methods=['POST'])
def set_job_preference():
    """Upsert a student's like/dislike flag for a co-op position."""
    the_data = request.json
    current_app.logger.info(the_data)

    query = '''
    INSERT INTO viewsPos (studentId, coopPositionId, preference)
    VALUES (%s, %s, %s)
    ON DUPLICATE KEY UPDATE preference = VALUES(preference)
    '''
    current_app.logger.info(query)

    params = (the_data['studentId'],
              the_data['coopPositionId'],
              int(the_data['preference']))
    cursor = db.get_db().cursor()
    cursor.execute(query, params)
    db.get_db().commit()

    response = make_response("Preference saved successfully")
    response.status_code = 200
    return response


# Student views deadlines for positions
@views_position.route('/<studentID>/deadlines', methods=['GET'])
def get_deadlines(studentID):
    """Return titles and deadlines of positions the student has liked."""
    current_app.logger.info(f'GET /{studentID}/deadlines route')

    query = '''
        SELECT cp.title,
               cp.deadline
        FROM viewsPos vp
        JOIN coopPositions cp ON vp.coopPositionId = cp.coopPositionId
        WHERE vp.studentId = %s AND vp.preference = TRUE;
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query, (studentID,))
    rows = cursor.fetchall()

    the_response = make_response(jsonify(rows))
    the_response.status_code = 200
    return the_response


# Student views positions based on preference
@views_position.route('/viewpos/<studentID>', methods=['GET'])
def get_positions_by_preference(studentID):
    """Return positions the student viewed, optionally filtered by ?preference=."""
    current_app.logger.info(f'GET /viewpos/{studentID} route')

    pref_param = request.args.get('preference')

    # Map the accepted query-string spellings to their SQL filter; anything
    # else (when a value was supplied) is a 400.
    clause_for = {
        'true': 'AND vp.preference = TRUE',
        '1': 'AND vp.preference = TRUE',
        'false': 'AND vp.preference = FALSE',
        '0': 'AND vp.preference = FALSE',
    }
    if pref_param is None:
        preference_clause = ''
    else:
        preference_clause = clause_for.get(pref_param.lower())
        if preference_clause is None:
            return jsonify({"error": "Invalid preference value"}), 400

    query = f'''
        SELECT cp.*
        FROM viewsPos vp
        JOIN coopPositions cp ON cp.coopPositionId = vp.coopPositionId
        WHERE vp.studentId = %s
        {preference_clause}
    '''
    try:
        cursor = db.get_db().cursor()
        cursor.execute(query, (studentID,))
        return jsonify(cursor.fetchall()), 200
    except Exception as e:
        current_app.logger.error(f"Error fetching positions by preference: {e}")
        return jsonify({"error": "Server error"}), 500


# Admin views preference metrics
@views_position.route('/viewspos/<preference>', methods=['GET'])
def get_preference_metrics(preference):
    """Count, per position, how many students expressed the given preference."""
    current_app.logger.info('GET /viewspos/%s route', preference)

    query = '''
        SELECT
            cp.coopPositionId,
            cp.title,
            com.name AS companyName,
            COUNT(vp.studentId) AS prefCount
        FROM coopPositions cp
        LEFT JOIN createsPos cr
               ON cr.coopPositionId = cp.coopPositionId
        LEFT JOIN users u
               ON u.userId = cr.employerId
        LEFT JOIN companyProfiles com
               ON com.companyProfileId = u.companyProfileId
        LEFT JOIN viewsPos vp
               ON vp.coopPositionId = cp.coopPositionId
              AND vp.preference = %s
        GROUP BY cp.coopPositionId, cp.title, com.name
        ORDER BY prefCount DESC, cp.title ASC;
    '''
    cursor = db.get_db().cursor()
    cursor.execute(query, (preference,))
    rows = cursor.fetchall()

    the_response = make_response(jsonify(rows))
    the_response.status_code = 200
    return the_response
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import make_response
from flask import current_app
from backend.db_connection import db

workedatpos = Blueprint('workedatpos', __name__)


# Advisor views historical placement data for all students
# (filtering by major/industry happens in the frontend).
@workedatpos.route('/workedatpos/placement-data', methods=['GET'])
def get_scatter_plot_data():
    """Return one row per (student application, position) with GPA, pay and hire flag.

    wasHired is the workedAtPos.studentId from the LEFT JOIN — non-NULL means
    the student actually worked that position.

    Fix: the company was previously joined on cp.industry = comp.industry,
    which cross-joins every company in the same industry and duplicates rows.
    The company is now resolved through the employer that created the position
    (createsPos -> users -> companyProfiles), matching the join path already
    used by the wage-data query below.
    """
    current_app.logger.info('GET /workedatpos/placement-data route')

    query = '''
        SELECT u.major,
               cp.industry,
               a.gpa,
               cp.hourlyPay,
               wp.studentId AS wasHired,
               u.firstName,
               u.lastName,
               cp.title AS positionTitle,
               comp.name AS companyName,
               u.college,
               u.gradYear,
               cp.location
        FROM users u
        JOIN appliesToApp ata ON u.userId = ata.studentId
        JOIN applications a ON ata.applicationId = a.applicationId
        JOIN coopPositions cp ON a.coopPositionId = cp.coopPositionId
        JOIN createsPos cr ON cr.coopPositionId = cp.coopPositionId
        JOIN users emp ON emp.userId = cr.employerId
        JOIN companyProfiles comp ON comp.companyProfileId = emp.companyProfileId
        LEFT JOIN workedAtPos wp ON u.userId = wp.studentId AND wp.coopPositionId = cp.coopPositionId
        WHERE a.gpa IS NOT NULL
          AND cp.hourlyPay IS NOT NULL
        ORDER BY u.major, cp.industry, a.gpa DESC
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    the_response.headers.add('Access-Control-Allow-Origin', '*')
    the_response.headers.add('Access-Control-Allow-Headers', 'Content-Type')
    return the_response


# Advisor and student view company rating data rated by past co-ops.
@workedatpos.route('/workedatpos/company-ratings', methods=['GET'])
def get_company_ratings():
    """Return per-company rating aggregates (avg/min/max/count) from workedAtPos.

    Fix: same join-key defect as placement-data — matching companies by
    industry attributed every rating to every company in that industry.
    Company is now reached via createsPos -> users -> companyProfiles.
    """
    current_app.logger.info('GET /workedatpos/company-ratings route')

    query = '''
        SELECT
            comp.companyProfileId,
            comp.name AS companyName,
            comp.industry AS companyIndustry,
            AVG(wp.companyRating) AS avgRating,
            COUNT(wp.companyRating) AS totalRatings,
            MIN(wp.companyRating) AS minRating,
            MAX(wp.companyRating) AS maxRating,
            COUNT(DISTINCT wp.studentId) AS studentsWhoRated
        FROM workedAtPos wp
        JOIN coopPositions cp ON wp.coopPositionId = cp.coopPositionId
        JOIN createsPos cr ON cr.coopPositionId = cp.coopPositionId
        JOIN users u ON u.userId = cr.employerId
        JOIN companyProfiles comp ON comp.companyProfileId = u.companyProfileId
        WHERE wp.companyRating IS NOT NULL
        GROUP BY comp.companyProfileId, comp.name, comp.industry
        ORDER BY avgRating DESC;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    the_response.headers.add('Access-Control-Allow-Origin', '*')
    the_response.headers.add('Access-Control-Allow-Headers', 'Content-Type')
    return the_response


# Student views wage data from past co-ops.
@workedatpos.route('/workedatpos/wagedata', methods=['GET'])
def get_company_wage_data():
    """Return per-(company, position title) pay stats and count of past co-ops.

    The LEFT JOIN on workedAtPos keeps positions nobody has worked yet
    (numPreviousCoops = 0).
    """
    current_app.logger.info('GET /workedatpos/wagedata route')

    query = '''
        SELECT cp.name AS companyName,
               pos.title AS positionTitle,
               MIN(pos.hourlyPay) AS minSalary,
               MAX(pos.hourlyPay) AS maxSalary,
               AVG(pos.hourlyPay) AS avgPay,
               COUNT(w.studentId) AS numPreviousCoops
        FROM companyProfiles cp
        JOIN users u ON cp.companyProfileId = u.companyProfileId
        JOIN createsPos cr ON u.userId = cr.employerId
        JOIN coopPositions pos ON cr.coopPositionId = pos.coopPositionId
        LEFT JOIN workedAtPos w ON pos.coopPositionId = w.coopPositionId
        GROUP BY cp.name, pos.title
        ORDER BY avgPay DESC;
    '''

    cursor = db.get_db().cursor()
    cursor.execute(query)
    theData = cursor.fetchall()

    the_response = make_response(jsonify(theData))
    the_response.status_code = 200
    the_response.headers.add('Access-Control-Allow-Origin', '*')
    the_response.headers.add('Access-Control-Allow-Headers', 'Content-Type')
    return the_response
related to the theme of the app base="light" -primaryColor="#6550e6" -font="monospace" +primaryColor="#1e3a8a" +font="sans serif" diff --git a/app/src/Home.py b/app/src/Home.py index ef0f7b19ad..7eb50f9ed2 100644 --- a/app/src/Home.py +++ b/app/src/Home.py @@ -32,46 +32,185 @@ # The major content of this page # *************************************************** -# set the title of the page and provide a simple prompt. +# Custom CSS for styling +st.markdown(""" + +""", unsafe_allow_html=True) + +# Enhanced header section +st.markdown('

CoopAlytics

', unsafe_allow_html=True) +st.markdown('

Your Gateway to Co-op Data Analytics & Management

', unsafe_allow_html=True) + +# Welcome section with feature highlights +st.markdown(""" +
+

Welcome to CoopAlytics! 🎯

+

Select your role below to access personalized dashboards and insights

+
+""", unsafe_allow_html=True) + logger.info("Loading the Home page of the app") -st.title('CS 3200 Sample Semester Project App') -st.write('\n\n') -st.write('### HI! As which user would you like to log in?') - -# For each of the user personas for which we are implementing -# functionality, we put a button on the screen that the user -# can click to MIMIC logging in as that mock user. - -if st.button("Act as John, a Political Strategy Advisor", - type = 'primary', - use_container_width=True): - # when user clicks the button, they are now considered authenticated - st.session_state['authenticated'] = True - # we set the role of the current user - st.session_state['role'] = 'pol_strat_advisor' - # we add the first name of the user (so it can be displayed on - # subsequent pages). - st.session_state['first_name'] = 'John' - # finally, we ask streamlit to switch to another page, in this case, the - # landing page for this particular user type - logger.info("Logging in as Political Strategy Advisor Persona") - st.switch_page('pages/00_Pol_Strat_Home.py') - -if st.button('Act as Mohammad, an USAID worker', - type = 'primary', - use_container_width=True): - st.session_state['authenticated'] = True - st.session_state['role'] = 'usaid_worker' - st.session_state['first_name'] = 'Mohammad' - st.switch_page('pages/10_USAID_Worker_Home.py') - -if st.button('Act as System Administrator', - type = 'primary', - use_container_width=True): - st.session_state['authenticated'] = True - st.session_state['role'] = 'administrator' - st.session_state['first_name'] = 'SysAdmin' - st.switch_page('pages/20_Admin_Home.py') + +# Create three columns for better layout +col1, col2, col3 = st.columns([1, 2, 1]) + +with col2: + st.markdown("### Choose Your User Role") + + # Student Persona + st.markdown(""" +
+
🎓 Student Portal
+
+ Access your application status, explore co-op opportunities, track your progress, + and get insights on industry trends and placement data. +
+
+ """, unsafe_allow_html=True) + + if st.button("Login as Student", + type='primary', + use_container_width=True, + key="student_login"): + st.session_state['authenticated'] = True + st.session_state['role'] = 'student' + st.session_state['first_name'] = 'Charlie' + st.session_state['user_id'] = 1 + logger.info("Logging in as Student Persona") + st.switch_page('pages/00_Student_Home.py') + + st.markdown("
", unsafe_allow_html=True) + + # Advisor Persona + st.markdown(""" +
+
👨‍🏫 Academic Advisor Portal
+
+ Monitor your advisees' application progress, analyze placement trends, + identify students needing support, and access comprehensive analytics. +
+
+ """, unsafe_allow_html=True) + + if st.button('Login as Academic Advisor', + type='primary', + use_container_width=True, + key="advisor_login"): + st.session_state['authenticated'] = True + st.session_state['role'] = 'advisor' + st.session_state['first_name'] = 'Dr. Sarah' + st.session_state['user_id'] = 31 # Sarah Martinez + logger.info("Logging in as Academic Advisor Persona") + st.switch_page('pages/10_Advisor_Home.py') + + st.markdown("
", unsafe_allow_html=True) + + # Employer Persona + st.markdown(""" +
+
🏢 Employer Portal
+
+ Manage co-op positions, review applications, track hiring metrics, + and access candidate analytics to make informed hiring decisions. +
+
+ """, unsafe_allow_html=True) + + if st.button('Login as Employer', + type='primary', + use_container_width=True, + key="employer_login"): + st.session_state['authenticated'] = True + st.session_state['role'] = 'employer' + st.session_state['first_name'] = 'Jennifer' + logger.info("Logging in as Employer Persona") + st.switch_page('pages/20_Employer_Home.py') + + st.markdown("
", unsafe_allow_html=True) + + # System Administrator + st.markdown(""" +
+
⚙️ System Administrator
+
+ Access system-wide analytics, manage user accounts, monitor platform performance, + and maintain database integrity across all user roles. +
+
+ """, unsafe_allow_html=True) + + if st.button('Login as System Administrator', + type='primary', + use_container_width=True, + key="admin_login"): + st.session_state['authenticated'] = True + st.session_state['role'] = 'administrator' + st.session_state['first_name'] = 'SysAdmin' + logger.info("Logging in as System Administrator Persona") + st.switch_page('pages/30_Admin_Home.py') + +# Footer section +st.markdown("

", unsafe_allow_html=True) +st.markdown(""" +
+ 🚀 Platform Features: Application Tracking • Placement Analytics • Company Ratings • + GPA vs Salary Insights • Industry Trends • Student Progress Monitoring +
+""", unsafe_allow_html=True) diff --git a/app/src/assets/coopalyticslogo.png b/app/src/assets/coopalyticslogo.png new file mode 100644 index 0000000000..68b3f50c6e Binary files /dev/null and b/app/src/assets/coopalyticslogo.png differ diff --git a/app/src/modules/nav.py b/app/src/modules/nav.py index cb31d3bf67..108c7e45cc 100644 --- a/app/src/modules/nav.py +++ b/app/src/modules/nav.py @@ -11,48 +11,117 @@ def HomeNav(): def AboutPageNav(): - st.sidebar.page_link("pages/30_About.py", label="About", icon="🧠") + st.sidebar.page_link("pages/90_About.py", label="About", icon="🧠") -#### ------------------------ Examples for Role of pol_strat_advisor ------------------------ -def PolStratAdvHomeNav(): +#### ------------------------ Student (Charlie Stout) Role ------------------------ +def StudentHomeNav(): st.sidebar.page_link( - "pages/00_Pol_Strat_Home.py", label="Political Strategist Home", icon="👤" + "pages/00_Student_Home.py", label="Student Dashboard", icon="🎓" ) -def WorldBankVizNav(): +def StudentApplicationsNav(): st.sidebar.page_link( - "pages/01_World_Bank_Viz.py", label="World Bank Visualization", icon="🏦" + "pages/01_Student_Applications.py", label="My Applications", icon="📝" ) -def MapDemoNav(): - st.sidebar.page_link("pages/02_Map_Demo.py", label="Map Demonstration", icon="🗺️") +def StudentPositionsNav(): + st.sidebar.page_link( + "pages/02_Student_Browse_Positions.py", label="Browse Co-op Positions", icon="🔍" + ) -## ------------------------ Examples for Role of usaid_worker ------------------------ -def ApiTestNav(): - st.sidebar.page_link("pages/12_API_Test.py", label="Test the API", icon="🛜") +def StudentAnalyticsNav(): + st.sidebar.page_link( + "pages/03_Student_Analytics.py", label="Salary & Company Data", icon="📊" + ) + +def StudentCalendarNav(): + st.sidebar.page_link( + "pages/04_Student_Calendar.py", label="Application Calendar", icon="📅" + ) -def PredictionNav(): +#### ------------------------ Advisor (Sarah Martinez) Role 
------------------------ +def AdvisorHomeNav(): st.sidebar.page_link( - "pages/11_Prediction.py", label="Regression Prediction", icon="📈" + "pages/10_Advisor_Home.py", label="Advisor Dashboard", icon="👨‍🏫" + ) + +def AdvisorStudentManagementNav(): + st.sidebar.page_link( + "pages/13_Advisor_StudentManagement.py", label="Student Management", icon="👥" ) -def ClassificationNav(): +def AdvisorAnalyticsNav(): st.sidebar.page_link( - "pages/13_Classification.py", label="Classification Demo", icon="🌺" + "pages/11_Advisor_Analytics.py", label="Placement Analytics", icon="📈" ) -#### ------------------------ System Admin Role ------------------------ -def AdminPageNav(): - st.sidebar.page_link("pages/20_Admin_Home.py", label="System Admin", icon="🖥️") +def AdvisorCompaniesNav(): st.sidebar.page_link( - "pages/21_ML_Model_Mgmt.py", label="ML Model Management", icon="🏢" + "pages/12_Advisor_Companies.py", label="Company Partnerships", icon="🏢" + ) + + +#### ------------------------ Employer (Phoebe Hwang) Role ------------------------ +def EmployerHomeNav(): + st.sidebar.page_link( + "pages/20_Employer_Home.py", label="Employer Dashboard", icon="🏢" + ) + + +def EmployerPostingsNav(): + st.sidebar.page_link( + "pages/21_Employer_Postings.py", label="Manage Job Postings", icon="📄" + ) + + +def EmployerApplicationsNav(): + st.sidebar.page_link( + "pages/22_Employer_Applications.py", label="Review Applications", icon="👀" + ) + + +def EmployerCandidatesNav(): + st.sidebar.page_link( + "pages/23_Employer_Candidates.py", label="Search Candidates", icon="🔎" + ) + + + +#### ------------------------ System Administrator (Kaelyn Dunn) Role ------------------------ +def AdminHomeNav(): + st.sidebar.page_link( + "pages/30_Admin_Home.py", label="Admin Dashboard", icon="⚙️" + ) + + +def AdminEmployersNav(): + st.sidebar.page_link( + "pages/31_Admin_Employers.py", label="Manage Employers", icon="🏭" + ) + + +def AdminPostingsNav(): + st.sidebar.page_link( + "pages/32_Admin_Postings.py", 
label="Review Job Postings", icon="✅" + ) + + +def AdminDEINav(): + st.sidebar.page_link( + "pages/33_Admin_DEI.py", label="DEI Metrics", icon="🌍" + ) + + +def AdminAnalyticsNav(): + st.sidebar.page_link( + "pages/34_Admin_Analytics.py", label="Platform Analytics", icon="📈" ) @@ -63,7 +132,7 @@ def SideBarLinks(show_home=False): """ # add a logo to the sidebar always - st.sidebar.image("assets/logo.png", width=150) + st.sidebar.image("assets/coopalyticslogo.png", width=300) # If there is no logged in user, redirect to the Home (Landing) page if "authenticated" not in st.session_state: @@ -77,21 +146,36 @@ def SideBarLinks(show_home=False): # Show the other page navigators depending on the users' role. if st.session_state["authenticated"]: - # Show World Bank Link and Map Demo Link if the user is a political strategy advisor role. - if st.session_state["role"] == "pol_strat_advisor": - PolStratAdvHomeNav() - WorldBankVizNav() - MapDemoNav() - - # If the user role is usaid worker, show the Api Testing page - if st.session_state["role"] == "usaid_worker": - PredictionNav() - ApiTestNav() - ClassificationNav() - - # If the user is an administrator, give them access to the administrator pages + # Student Navigation (Charlie Stout persona) + if st.session_state["role"] == "student": + StudentHomeNav() + StudentApplicationsNav() + StudentPositionsNav() + StudentCalendarNav() + StudentAnalyticsNav() + + # Advisor Navigation (Sarah Martinez persona) + if st.session_state["role"] == "advisor": + AdvisorHomeNav() + AdvisorStudentManagementNav() + AdvisorAnalyticsNav() + AdvisorCompaniesNav() + + # Employer Navigation (Phoebe Hwang persona) + if st.session_state["role"] == "employer": + EmployerHomeNav() + EmployerPostingsNav() + EmployerApplicationsNav() + EmployerCandidatesNav() + + # System Administrator Navigation (Kaelyn Dunn persona) if st.session_state["role"] == "administrator": - AdminPageNav() + AdminHomeNav() + AdminEmployersNav() + AdminPostingsNav() + 
AdminDEINav() + AdminAnalyticsNav() + # Always show the About page at the bottom of the list of links AboutPageNav() @@ -101,4 +185,4 @@ def SideBarLinks(show_home=False): if st.sidebar.button("Logout"): del st.session_state["role"] del st.session_state["authenticated"] - st.switch_page("Home.py") + st.switch_page("Home.py") \ No newline at end of file diff --git a/app/src/pages/00_Pol_Strat_Home.py b/app/src/pages/00_Pol_Strat_Home.py deleted file mode 100644 index 3d02f25552..0000000000 --- a/app/src/pages/00_Pol_Strat_Home.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks - -st.set_page_config(layout = 'wide') - -# Show appropriate sidebar links for the role of the currently logged in user -SideBarLinks() - -st.title(f"Welcome Political Strategist, {st.session_state['first_name']}.") -st.write('') -st.write('') -st.write('### What would you like to do today?') - -if st.button('View World Bank Data Visualization', - type='primary', - use_container_width=True): - st.switch_page('pages/01_World_Bank_Viz.py') - -if st.button('View World Map Demo', - type='primary', - use_container_width=True): - st.switch_page('pages/02_Map_Demo.py') \ No newline at end of file diff --git a/app/src/pages/00_Student_Home.py b/app/src/pages/00_Student_Home.py new file mode 100644 index 0000000000..713c4b7eac --- /dev/null +++ b/app/src/pages/00_Student_Home.py @@ -0,0 +1,470 @@ +import logging +logging.basicConfig(format='%(filename)s:%(lineno)s:%(levelname)s -- %(message)s', level=logging.INFO) +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout='wide') +SideBarLinks() + +logger.info("Loading Student Home page") + +# Charlie Stout's userId from database +API_BASE_URL = "http://web-api:4000" + +# Get the user_id from session state +charlie_user_id = st.session_state.get("user_id", None) + 
+if charlie_user_id is None: + st.error("User not logged in. Please return to home and log in.") + st.stop() + +# Function to fetch user data from API +def fetch_user_data(user_id): + try: + response = requests.get(f"{API_BASE_URL}/users/{user_id}") + logger.info(f"Fetching user data from API: status_code={response.status_code}") + if response.status_code == 200: + data = response.json() + logger.info(f"User data received: {data}") + return data + else: + logger.error(f"Failed to fetch user data, status code: {response.status_code}, response: {response.text}") + return None + except Exception as e: + logger.error(f"Error fetching user data: {e}") + # Fallback data if API is not available + return { + 'userId': 1, + 'firstName': 'Charlie', + 'lastName': 'Stout', + 'email': 'c.stout@student.edu', + 'phone': '555-0101', + 'major': 'Computer Science', + 'minor': 'Mathematics', + 'college': 'Khoury College of Computer Sciences', + 'gradYear': '2026', + 'grade': 'Junior', + 'gender': None, + 'race': None, + 'nationality': None, + 'sexuality': None, + 'disability': None + } + +# Function to fetch user skills from API +def fetch_user_skills(user_id): + try: + response = requests.get(f"{API_BASE_URL}/users/{user_id}/skills") + if response.status_code == 200: + return response.json() + return [] + except Exception as e: + logger.error(f"Error fetching user skills: {e}") + return [] + +# Function to fetch application summary from API +def fetch_application_summary(user_id): + try: + response = requests.get(f"{API_BASE_URL}/student/{user_id}/applications/summary") + logger.info(f"Fetching application summary from API: status_code={response.status_code}") + if response.status_code == 200: + data = response.json() + logger.info(f"Application summary data received: {data}") + return data + else: + logger.warning(f"Failed to fetch application summary, status code: {response.status_code}") + return [] + except Exception as e: + logger.error(f"Error fetching application summary: 
{e}") + return [] + +# Function to fetch recent applications from API +def fetch_recent_applications(user_id): + try: + response = requests.get(f"{API_BASE_URL}/users/{user_id}/recent-applications") + if response.status_code == 200: + return response.json() + return [] + except Exception as e: + logger.error(f"Error fetching recent applications: {e}") + return [] + +# Function to update user data via API +def update_user_data(user_data): + try: + response = requests.put(f"{API_BASE_URL}/users", json=user_data) + return response.status_code == 200 + except Exception as e: + logger.error(f"Error updating user data: {e}") + return False + +# Function to fetch all available skills from API +def fetch_all_skills(): + try: + response = requests.get(f"{API_BASE_URL}/skills") + if response.status_code == 200: + return response.json() + return [] + except Exception as e: + logger.error(f"Error fetching all skills: {e}") + return [] + +# Function to update user skills +def update_user_skills(user_id, updated_skills, removed_skills): + try: + update_data = { + "updated_skills": list(updated_skills.values()), + "removed_skills": removed_skills + } + response = requests.put(f"{API_BASE_URL}/users/{user_id}/skills", json=update_data) + return response.status_code == 200 + except Exception as e: + logger.error(f"Error updating user skills: {e}") + return False + +# Function to add new skills to user profile +def add_user_skills(user_id, new_skills): + try: + response = requests.post(f"{API_BASE_URL}/users/{user_id}/skills", json={"skills": new_skills}) + return response.status_code == 200 + except Exception as e: + logger.error(f"Error adding user skills: {e}") + return False + +# Fetch user data and related information +user_data = fetch_user_data(charlie_user_id) +if isinstance(user_data, list) and len(user_data) > 0: + user_data = user_data[0] + +user_skills = fetch_user_skills(charlie_user_id) +app_summary = fetch_application_summary(charlie_user_id) +recent_applications = 
fetch_recent_applications(charlie_user_id) + +if user_data: + # Header + st.title("🎓 Student Dashboard") + st.subheader(f"Welcome back, {user_data['firstName']}!") + + # Create tabs for better organization + tab1, tab2, tab3 = st.tabs(["📋 Profile", "📊 Quick Stats", "🛠️ Skills Management"]) + + with tab1: + st.header("Your Profile") + + with st.form("profile_form"): + col1, col2 = st.columns(2) + + with col1: + st.subheader("Personal Information") + first_name = st.text_input("First Name", value=user_data.get("firstName", "")) + last_name = st.text_input("Last Name", value=user_data.get("lastName", "")) + email = st.text_input("Email", value=user_data.get("email", "")) + phone = st.text_input("Phone", value=user_data.get("phone", "")) + + with col2: + st.subheader("Academic Information") + major_options = ["Computer Science", "Data Science", "Information Systems", "Cybersecurity", + "Business", "Marketing", "Finance", "International Business", "Mechanical Engineering", + "Biomedical Engineering", "Electrical Engineering", "Environmental Engineering", + "Physics", "Biology", "Chemistry", "Psychology", "Design", "Mathematics", + "Economics", "Art", "Spanish", "Sociology", "History"] + + major_index = 0 + if user_data.get("major") in major_options: + major_index = major_options.index(user_data.get("major")) + major = st.selectbox("Major", major_options, index=major_index) + + minor_options = ["None"] + major_options + minor_index = 0 + if user_data.get("minor") in minor_options: + minor_index = minor_options.index(user_data.get("minor")) + minor = st.selectbox("Minor", minor_options, index=minor_index) + + college_options = ["College of Arts, Media and Design", "Bouvé College of Health Sciences", + "D'Amore-McKim School of Business", "Khoury College of Computer Sciences", + "College of Engineering", "College of Science", "College of Social Sciences and Humanities"] + college_index = 0 + current_college = user_data.get("college", "") + if current_college in 
college_options: + college_index = college_options.index(current_college) + college = st.selectbox("College", college_options, index=college_index) + + grad_year_options = ["2024", "2025", "2026", "2027"] + grad_year_index = 0 + if user_data.get("gradYear") in grad_year_options: + grad_year_index = grad_year_options.index(user_data.get("gradYear")) + grad_year = st.selectbox("Graduation Year", grad_year_options, index=grad_year_index) + + grade_options = ["Sophomore", "Junior", "Senior"] + grade_index = 0 + if user_data.get("grade") in grade_options: + grade_index = grade_options.index(user_data.get("grade")) + grade = st.selectbox("Current Grade", grade_options, index=grade_index) + + st.subheader("Demographics") + demo_col1, demo_col2 = st.columns(2) + + with demo_col1: + gender_options = ["Male", "Female", "Non-binary", "Prefer not to say", "Other"] + gender_index = 0 + if user_data.get("gender") in gender_options: + gender_index = gender_options.index(user_data.get("gender")) + gender = st.selectbox("Gender", gender_options, index=gender_index) + + race_options = ["White", "Asian", "Black/African American", "Hispanic/Latino", + "Native American", "Pacific Islander", "Mixed", "Prefer not to say"] + race_index = 0 + if user_data.get("race") in race_options: + race_index = race_options.index(user_data.get("race")) + race = st.selectbox("Race/Ethnicity", race_options, index=race_index) + + with demo_col2: + nationality_options = ["American", "International", "Prefer not to say"] + nationality_index = 0 + if user_data.get("nationality") in nationality_options: + nationality_index = nationality_options.index(user_data.get("nationality")) + nationality = st.selectbox("Nationality", nationality_options, index=nationality_index) + + sexuality_options = ["Heterosexual", "LGBTQ+", "Prefer not to say"] + sexuality_index = 0 + if user_data.get("sexuality") in sexuality_options: + sexuality_index = sexuality_options.index(user_data.get("sexuality")) + sexuality = 
st.selectbox("Sexual Orientation", sexuality_options, index=sexuality_index) + + disability_options = ["None", "ADHD", "Anxiety", "Dyslexia", "Depression", "Autism", "Prefer not to say"] + disability_index = 0 + if user_data.get("disability") in disability_options: + disability_index = disability_options.index(user_data.get("disability")) + disability = st.selectbox("Disability Status", disability_options, index=disability_index) + + submitted = st.form_submit_button("Update Profile", type="primary", use_container_width=True) + + if submitted: + update_data = { + "userId": charlie_user_id, + "firstName": first_name, + "lastName": last_name, + "email": email, + "phone": phone, + "major": major, + "minor": minor if minor != "None" else None, + "college": college, + "gradYear": grad_year, + "grade": grade, + "gender": gender, + "race": race, + "nationality": nationality, + "sexuality": sexuality, + "disability": disability if disability != "None" else None + } + + if update_user_data(update_data): + st.success("✅ Profile updated successfully!") + st.rerun() + else: + st.error("❌ Failed to update profile") + + + with tab2: + st.header("📊 Quick Stats") + + # Calculate metrics from real data + total_applications = sum(item.get('ApplicationCount', 0) for item in app_summary) if app_summary else 0 + under_review = next((item.get('ApplicationCount', 0) for item in app_summary if item.get('status') == 'Under Review'), 0) + submitted = next((item.get('ApplicationCount', 0) for item in app_summary if item.get('status') == 'Submitted'), 0) + + # Get GPA from most recent application + latest_gpa = "N/A" + if recent_applications: + latest_gpa = recent_applications[0].get('gpa', 'N/A') + + # Display metrics in a clean layout + metric_col1, metric_col2, metric_col3, metric_col4 = st.columns(4) + + with metric_col1: + st.metric(label="📝 Applications Submitted", value=str(total_applications), delta="Total") + + with metric_col2: + st.metric(label="👁️ Under Review", 
value=str(under_review), delta="Pending") + + with metric_col3: + st.metric(label="📄 Recently Submitted", value=str(submitted), delta="Awaiting Review") + + with metric_col4: + st.metric(label="⭐ GPA", value=str(latest_gpa), delta="Latest Application") + + # Skills section + st.subheader("🛠️ Your Skills Profile") + st.caption("Based on your profile and experience") + + if user_skills: + # Group skills by category + skills_by_category = {} + for skill in user_skills: + category = skill['category'] + if category not in skills_by_category: + skills_by_category[category] = [] + skills_by_category[category].append(skill) + + # Display skills in columns + categories = list(skills_by_category.keys()) + if len(categories) >= 3: + skill_col1, skill_col2, skill_col3 = st.columns(3) + cols = [skill_col1, skill_col2, skill_col3] + elif len(categories) == 2: + skill_col1, skill_col2 = st.columns(2) + cols = [skill_col1, skill_col2] + else: + cols = [st] + + for i, category in enumerate(categories): + col = cols[i % len(cols)] + with col: + st.markdown(f"**{category}**") + for skill in skills_by_category[category]: + proficiency = skill['proficiencyLevel'] + progress_value = proficiency / 5.0 # Convert 1-5 scale to 0-1 + + # Convert proficiency to text + proficiency_text = {1: "Beginner", 2: "Basic", 3: "Intermediate", 4: "Advanced", 5: "Expert"} + level_text = proficiency_text.get(proficiency, "Unknown") + + st.write(f"{skill['name']} ({level_text})") + st.progress(progress_value) + else: + st.info("No skills data available. 
Please contact your advisor to update your skills profile.") + + with tab3: + # Skills Management Section + st.header("🛠️ Skills Management") + + if user_skills: + # Group skills by category + skills_by_category = {} + for skill in user_skills: + category = skill['category'] + if category not in skills_by_category: + skills_by_category[category] = [] + skills_by_category[category].append(skill) + + # Create skills management form + with st.form("skills_form"): + st.subheader("📝 Edit Your Skills & Proficiency Levels") + + # Display skills grouped by category + updated_skills = {} + skills_to_remove = [] + + for category, skills in skills_by_category.items(): + st.markdown(f"**{category}**") + + for skill in skills: + col1, col2, col3 = st.columns([3, 2, 1]) + + with col1: + st.write(f"• {skill['name']}") + + with col2: + # Proficiency level slider (1-5) + proficiency = st.slider( + f"Level", + min_value=1, + max_value=5, + value=skill['proficiencyLevel'], + key=f"skill_{skill['skillId']}_proficiency", + help="1=Beginner, 2=Novice, 3=Intermediate, 4=Advanced, 5=Expert" + ) + updated_skills[skill['skillId']] = { + 'skillId': skill['skillId'], + 'proficiencyLevel': proficiency + } + + with col3: + # Remove skill checkbox + if st.checkbox("Remove", key=f"remove_skill_{skill['skillId']}"): + skills_to_remove.append(skill['skillId']) + + st.markdown("") # Add spacing between categories + + # Save skills changes button + skills_submitted = st.form_submit_button("💾 Save Skills Changes", type="primary", use_container_width=True) + + if skills_submitted: + # Filter out skills marked for removal + final_skills = {k: v for k, v in updated_skills.items() if k not in skills_to_remove} + + if update_user_skills(charlie_user_id, final_skills, skills_to_remove): + st.success("✅ Skills updated successfully!") + st.rerun() + else: + st.error("❌ Failed to update skills") + + # Add New Skills Section + st.markdown("---") + st.subheader("➕ Add New Skills") + + # Fetch all available 
skills for adding + all_skills = fetch_all_skills() + if all_skills: + # Filter out skills user already has + current_skill_ids = [skill['skillId'] for skill in user_skills] if user_skills else [] + available_skills = [skill for skill in all_skills if skill['skillId'] not in current_skill_ids] + + if available_skills: + with st.form("add_skills_form"): + # Group available skills by category for easier selection + available_by_category = {} + for skill in available_skills: + category = skill['category'] + if category not in available_by_category: + available_by_category[category] = [] + available_by_category[category].append(skill) + + selected_skills = [] + + for category, skills in available_by_category.items(): + st.markdown(f"**{category}**") + + for skill in skills: + col1, col2 = st.columns([3, 2]) + + with col1: + if st.checkbox(skill['name'], key=f"add_skill_{skill['skillId']}"): + with col2: + proficiency = st.slider( + "Proficiency", + min_value=1, + max_value=5, + value=3, + key=f"new_skill_{skill['skillId']}_proficiency", + help="1=Beginner, 2=Novice, 3=Intermediate, 4=Advanced, 5=Expert" + ) + selected_skills.append({ + 'skillId': skill['skillId'], + 'proficiencyLevel': proficiency + }) + + st.markdown("") # Add spacing + + # Add selected skills button + add_skills_submitted = st.form_submit_button("➕ Add Selected Skills", type="secondary", use_container_width=True) + + if add_skills_submitted and selected_skills: + if add_user_skills(charlie_user_id, selected_skills): + st.success(f"✅ Added {len(selected_skills)} new skills!") + st.rerun() + else: + st.error("❌ Failed to add skills") + elif add_skills_submitted and not selected_skills: + st.warning("⚠️ Please select at least one skill to add") + else: + st.info("🎉 You have all available skills! Great job!") + else: + st.error("❌ Unable to load available skills") + + +else: + st.error("Unable to load user data. 
Please try again later.") \ No newline at end of file diff --git a/app/src/pages/01_Student_Applications.py b/app/src/pages/01_Student_Applications.py new file mode 100644 index 0000000000..c84f827e86 --- /dev/null +++ b/app/src/pages/01_Student_Applications.py @@ -0,0 +1,12 @@ +import logging +logging.basicConfig(format='%(filename)s:%(lineno)s:%(levelname)s -- %(message)s', level=logging.INFO) +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout='wide') +SideBarLinks() + +logger.info("Loading Applications page") \ No newline at end of file diff --git a/app/src/pages/02_Student_Browse_Positions.py b/app/src/pages/02_Student_Browse_Positions.py new file mode 100644 index 0000000000..c415c56004 --- /dev/null +++ b/app/src/pages/02_Student_Browse_Positions.py @@ -0,0 +1,90 @@ +import logging +import streamlit as st +import requests +from modules.nav import SideBarLinks + +# Logging setup +logging.basicConfig(format='%(filename)s:%(lineno)s:%(levelname)s -- %(message)s', level=logging.INFO) +logger = logging.getLogger(__name__) + +# Page setup +st.set_page_config(layout='wide') +SideBarLinks() + +logger.info("Loading Coop Positions page") + +# Constants +API_BASE_URL = "http://web-api:4000" + +# Get user ID from session state +charlie_user_id = st.session_state.get("user_id", None) + +if charlie_user_id is None: + st.error("🚫 User not logged in. 
# Function to fetch and display positions
def fetch_positions():
    """Return the list of co-op positions for the selected filter.

    Reads the module-level `filter_option`, `charlie_user_id` and
    `API_BASE_URL`. Returns a list of position dicts, or [] on any
    failure (unknown filter, non-200 response, or request error).
    """
    if filter_option == "All":
        url = f"{API_BASE_URL}/positions"  # ✔ this is from coopPositions blueprint

    elif filter_option == "Liked":
        url = f"{API_BASE_URL}/vp/viewpos/{charlie_user_id}?preference=true"  # ✔ matches blueprint + route

    elif filter_option == "Disliked":
        url = f"{API_BASE_URL}/vp/viewpos/{charlie_user_id}?preference=false"

    elif filter_option == "Matches Desired Skills":
        # NOTE(review): unlike the other routes this path has no blueprint
        # prefix — confirm it matches the API's desiredSkills route.
        url = f"{API_BASE_URL}/{charlie_user_id}/desiredSkills"
    else:
        st.warning("Unknown filter selected.")
        return []

    try:
        # timeout added so a stalled API cannot hang the Streamlit page
        # indefinitely; matches the timeout=10 used by the analytics page.
        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            return response.json()
        else:
            st.error(f"Failed to fetch data: {response.status_code}")
            return []
    except requests.exceptions.RequestException as e:
        st.error(f"API request error: {e}")
        return []
def fetch_wage_data():
    """Fetch wage data from the REST API.

    Returns the parsed JSON payload on success, or an empty list when
    the request fails or the API responds with a non-200 status.
    """
    try:
        resp = requests.get(WAGE_DATA_ENDPOINT, timeout=10)
    except requests.exceptions.RequestException as e:
        st.error(f"Error connecting to API: {str(e)}")
        return []

    # Guard clause: anything other than 200 is treated as "no data".
    if resp.status_code != 200:
        st.error(f"Failed to fetch data: {resp.status_code}")
        return []

    return resp.json()
# Format the data for better display + if not df.empty: + # Rename columns for better display + display_df = df.copy() + if 'companyName' in display_df.columns: + display_df = display_df.rename(columns={'companyName': 'Company Name'}) + if 'positionTitle' in display_df.columns: + display_df = display_df.rename(columns={'positionTitle': 'Position Title'}) + if 'minSalary' in display_df.columns: + display_df = display_df.rename(columns={'minSalary': 'Min Salary ($/hr)'}) + if 'maxSalary' in display_df.columns: + display_df = display_df.rename(columns={'maxSalary': 'Max Salary ($/hr)'}) + if 'avgPay' in display_df.columns: + display_df = display_df.rename(columns={'avgPay': 'Average Pay ($/hr)'}) + if 'numPreviousCoops' in display_df.columns: + display_df = display_df.rename(columns={'numPreviousCoops': 'Previous Co-ops'}) + + # Format salary columns to 2 decimal places + salary_columns = ['Min Salary ($/hr)', 'Max Salary ($/hr)', 'Average Pay ($/hr)'] + for col in salary_columns: + if col in display_df.columns: + display_df[col] = display_df[col].round(2) + + # Display the table + st.dataframe(display_df, use_container_width=True) + + # Add some visualizations + st.markdown("---") + st.subheader("Pay Distribution Analysis") + + col1, col2 = st.columns(2) + + with col1: + if 'Average Pay ($/hr)' in display_df.columns: + st.bar_chart(display_df.set_index('Position Title')['Average Pay ($/hr)'].head(10)) + st.caption("Top 10 Positions by Average Pay") + + with col2: + if 'Company Name' in display_df.columns and 'Average Pay ($/hr)' in display_df.columns: + company_avg = display_df.groupby('Company Name')['Average Pay ($/hr)'].mean().sort_values(ascending=False).head(10) + st.bar_chart(company_avg) + st.caption("Top 10 Companies by Average Pay") + +else: + st.warning("No wage data available. 
# Fetch deadlines for flagged (preferred) positions
def fetch_flagged_deadlines(user_id):
    """Return deadline entries for positions the student has flagged.

    Calls GET /views_position/<user_id>/deadlines and returns the JSON
    list on success, or [] when the API responds with a non-200 status
    or the request fails. Failures are logged, never raised, so the
    page can still render an empty state.
    """
    try:
        url = f"{API_BASE_URL}/views_position/{user_id}/deadlines"
        logger.info(f"Fetching deadlines from: {url}")
        # timeout added so a stalled API cannot hang the page; matches
        # the timeout=10 convention used by the other pages in this app.
        response = requests.get(url, timeout=10)

        if response.status_code == 200:
            deadlines = response.json()
            logger.info(f"Fetched {len(deadlines)} deadline entries.")
            return deadlines
        else:
            logger.error(f"Failed to fetch deadlines: {response.status_code} {response.text}")
            return []
    except Exception as e:
        # Broad catch is deliberate: this is a UI boundary and must not crash.
        logger.error(f"Exception occurred while fetching deadlines: {e}")
        return []
st.markdown(f"🔹 **{row['title']}** — 🗓️ Deadline: `{row['deadline'].date()}`") + + st.markdown("---") + st.subheader("📋 Tabular View") + + # Show table + st.dataframe(df.rename(columns={ + "title": "Position Title", + "deadline": "Application Deadline" + }), use_container_width=True) +else: + st.info("📭 You haven’t flagged any positions yet. Flag some positions to see their deadlines here!") + diff --git a/app/src/pages/10_Advisor_Home.py b/app/src/pages/10_Advisor_Home.py new file mode 100644 index 0000000000..f1541a9e29 --- /dev/null +++ b/app/src/pages/10_Advisor_Home.py @@ -0,0 +1,219 @@ +import logging +logging.basicConfig(format='%(filename)s:%(lineno)s:%(levelname)s -- %(message)s', level=logging.INFO) +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout='wide') +SideBarLinks() + +logger.info("Loading Advisor Home page") + +# API configuration +API_BASE_URL = "http://web-api:4000" + +# Get the user_id from session state +advisor_user_id = st.session_state.get("user_id", None) + +if advisor_user_id is None: + st.error("User not logged in. 
# Function to fetch advisor data from API
def fetch_advisor_data(user_id):
    """Return the advisor's user record, or a hard-coded fallback.

    Calls GET /users/<user_id> and returns the first element of the
    JSON list (the API returns a list of rows). NOTE the asymmetry,
    preserved from the original behavior: a non-200 response returns
    None, while a connection error returns the static demo profile so
    the page stays usable when the API container is down.
    """
    try:
        # timeout added so a stalled API cannot hang the page load;
        # matches the timeout=10 convention used elsewhere in the app.
        response = requests.get(f"{API_BASE_URL}/users/{user_id}", timeout=10)
        logger.info(f"Fetching advisor data from API: status_code={response.status_code}")
        if response.status_code == 200:
            data = response.json()
            return data[0] if data else None
        return None
    except Exception as e:
        logger.error(f"Error fetching advisor data: {e}")
        # Fallback data if API is not available (demo/offline mode).
        return {
            'userId': 31,
            'firstName': 'Sarah',
            'lastName': 'Martinez',
            'email': 's.martinez@neu.edu',
            'phone': '555-0301',
            'college': 'NEU',
            'industry': 'Academic',
            'gender': 'Female',
            'race': 'Hispanic',
            'nationality': 'American',
            'sexuality': 'Heterosexual',
            'disability': None
        }
# Function to fetch student application statistics
def fetch_student_application_stats(student_id):
    """Return the application summary for one student, or [] on failure.

    Calls GET /student/<student_id>/applications/summary. Non-200
    responses and request errors are logged and yield [].
    """
    try:
        # timeout added so a stalled API cannot hang the page; matches
        # the timeout=10 convention used by the other pages in this app.
        response = requests.get(
            f"{API_BASE_URL}/student/{student_id}/applications/summary",
            timeout=10)
        logger.info(f"Fetching student application stats from API: status_code={response.status_code}")
        if response.status_code == 200:
            data = response.json()
            logger.info(f"Student application stats received: {data}")
            return data
        else:
            logger.warning(f"Failed to fetch student application stats, status code: {response.status_code}")
            return []
    except Exception as e:
        logger.error(f"Error fetching student application stats: {e}")
        return []

# Function to update advisor data
def update_advisor_data(advisor_id, advisor_data):
    """PUT the advisor's profile; return True iff the API answered 200."""
    try:
        response = requests.put(
            f"{API_BASE_URL}/advisors/{advisor_id}/profile",
            json=advisor_data,
            timeout=10)
        logger.info(f"Updating advisor profile: status_code={response.status_code}")
        return response.status_code == 200
    except Exception as e:
        logger.error(f"Error updating advisor data: {e}")
        return False

# Function to update student flag status
def update_student_flag(advisor_id, student_id, flagged):
    """Set or clear the advisor's flag on a student; True iff API answered 200."""
    try:
        response = requests.put(
            f"{API_BASE_URL}/advisors/{advisor_id}/students/{student_id}/flag",
            json={"flagged": flagged},
            timeout=10)
        logger.info(f"Updating student flag: status_code={response.status_code}")
        return response.status_code == 200
    except Exception as e:
        logger.error(f"Error updating student flag: {e}")
        return False
value=advisor_data.get("firstName", "")) + last_name = st.text_input("Last Name", value=advisor_data.get("lastName", "")) + + with col2: + email = st.text_input("Email", value=advisor_data.get("email", "")) + phone = st.text_input("Phone", value=advisor_data.get("phone", "")) + + st.subheader("Demographics") + demo_col1, demo_col2 = st.columns(2) + + with demo_col1: + gender_options = ["Male", "Female", "Non-binary", "Prefer not to say", "Other"] + gender_index = 0 + if advisor_data.get("gender") in gender_options: + gender_index = gender_options.index(advisor_data.get("gender")) + gender = st.selectbox("Gender", gender_options, index=gender_index) + + race_options = ["White", "Asian", "Black/African American", "Hispanic/Latino", + "Native American", "Pacific Islander", "Mixed", "Prefer not to say"] + race_index = 0 + if advisor_data.get("race") in race_options: + race_index = race_options.index(advisor_data.get("race")) + race = st.selectbox("Race/Ethnicity", race_options, index=race_index) + + with demo_col2: + nationality_options = ["American", "International", "Prefer not to say"] + nationality_index = 0 + if advisor_data.get("nationality") in nationality_options: + nationality_index = nationality_options.index(advisor_data.get("nationality")) + nationality = st.selectbox("Nationality", nationality_options, index=nationality_index) + + sexuality_options = ["Heterosexual", "LGBTQ+", "Prefer not to say"] + sexuality_index = 0 + if advisor_data.get("sexuality") in sexuality_options: + sexuality_index = sexuality_options.index(advisor_data.get("sexuality")) + sexuality = st.selectbox("Sexual Orientation", sexuality_options, index=sexuality_index) + + disability_options = ["None", "ADHD", "Anxiety", "Dyslexia", "Depression", "Autism", "Prefer not to say"] + disability_index = 0 + if advisor_data.get("disability") in disability_options: + disability_index = disability_options.index(advisor_data.get("disability")) + disability = st.selectbox("Disability Status", 
disability_options, index=disability_index) + + submitted = st.form_submit_button("Update Profile", type="primary", use_container_width=True) + + if submitted: + update_data = { + "userId": advisor_user_id, + "firstName": first_name, + "lastName": last_name, + "email": email, + "phone": phone, + "gender": gender, + "race": race, + "nationality": nationality, + "sexuality": sexuality, + "disability": disability if disability != "None" else None + } + + if update_advisor_data(advisor_user_id, update_data): + st.success("✅ Profile updated successfully!") + st.rerun() + else: + st.error("❌ Failed to update profile") + + +else: + st.error("Unable to load advisor data. Please try again later.") + st.info("If this problem persists, please contact the system administrator.") \ No newline at end of file diff --git a/app/src/pages/10_USAID_Worker_Home.py b/app/src/pages/10_USAID_Worker_Home.py deleted file mode 100644 index d7b230384c..0000000000 --- a/app/src/pages/10_USAID_Worker_Home.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks - -st.set_page_config(layout = 'wide') - -# Show appropriate sidebar links for the role of the currently logged in user -SideBarLinks() - -st.title(f"Welcome USAID Worker, {st.session_state['first_name']}.") -st.write('') -st.write('') -st.write('### What would you like to do today?') - -if st.button('Predict Value Based on Regression Model', - type='primary', - use_container_width=True): - st.switch_page('pages/11_Prediction.py') - -if st.button('View the Simple API Demo', - type='primary', - use_container_width=True): - st.switch_page('pages/12_API_Test.py') - -if st.button("View Classification Demo", - type='primary', - use_container_width=True): - st.switch_page('pages/13_Classification.py') \ No newline at end of file diff --git a/app/src/pages/11_Advisor_Analytics.py b/app/src/pages/11_Advisor_Analytics.py new file mode 100644 index 
0000000000..16a64c7ef9 --- /dev/null +++ b/app/src/pages/11_Advisor_Analytics.py @@ -0,0 +1,315 @@ +import logging +logging.basicConfig(format='%(filename)s:%(lineno)s:%(levelname)s -- %(message)s', level=logging.INFO) +logger = logging.getLogger(__name__) + +import streamlit as st +import plotly.graph_objects as go +import pandas as pd +import requests +from modules.nav import SideBarLinks + +st.set_page_config(layout='wide') +SideBarLinks() + +logger.info("Loading Advisor Analytics page") + +# API configuration +API_BASE_URL = "http://web-api:4000" + +# Get the user_id from session state +advisor_user_id = st.session_state.get("user_id", None) + +if advisor_user_id is None: + st.error("User not logged in. Please return to home and log in.") + st.stop() + +# Function to fetch advisor data from API +def fetch_advisor_data(user_id): + try: + response = requests.get(f"{API_BASE_URL}/users/{user_id}") + logger.info(f"Fetching advisor data from API: status_code={response.status_code}") + if response.status_code == 200: + data = response.json() + logger.info(f"Received advisor data: {data}") + return data[0] if data and len(data) > 0 else None + else: + logger.warning(f"API returned status {response.status_code}") + return None + except Exception as e: + logger.error(f"Error fetching advisor data: {e}") + # Fallback data if API is not available + return { + 'userId': 31, + 'firstName': 'Sarah', + 'lastName': 'Martinez', + 'email': 's.martinez@neu.edu' + } + +# Function to fetch placement analytics data +def fetch_placement_analytics(advisor_id): + # Define fallback sample data for demonstration + fallback_data = [ + { + 'firstName': 'Charlie', 'lastName': 'Stout', 'gradYear': '2026', 'major': 'Computer Science', + 'college': 'Khoury College of Computer Sciences', 'gpa': 3.8, 'status': 'Accepted', + 'positionTitle': 'Software Engineer Intern', 'salary': 75000, 'companyName': 'TechCorp', + 'industry': 'Technology' + }, + { + 'firstName': 'Isabella', 'lastName': 
'Anderson', 'gradYear': '2025', 'major': 'Business Administration', + 'college': "D'Amore-McKim School of Business", 'gpa': 3.6, 'status': 'Rejected', + 'positionTitle': 'Marketing Analyst', 'salary': 65000, 'companyName': 'MarketPro', + 'industry': 'Marketing' + }, + { + 'firstName': 'Liam', 'lastName': 'Williams', 'gradYear': '2025', 'major': 'Mechanical Engineering', + 'college': 'College of Engineering', 'gpa': 3.9, 'status': 'Accepted', + 'positionTitle': 'Engineering Intern', 'salary': 70000, 'companyName': 'EngineerCorp', + 'industry': 'Manufacturing' + }, + { + 'firstName': 'Sophia', 'lastName': 'Brown', 'gradYear': '2027', 'major': 'Data Science', + 'college': 'Khoury College of Computer Sciences', 'gpa': 3.7, 'status': 'Accepted', + 'positionTitle': 'Data Analyst', 'salary': 68000, 'companyName': 'DataFlow Analytics', + 'industry': 'Technology' + }, + { + 'firstName': 'Emma', 'lastName': 'Davis', 'gradYear': '2026', 'major': 'Finance', + 'college': "D'Amore-McKim School of Business", 'gpa': 3.5, 'status': 'Rejected', + 'positionTitle': 'Financial Analyst', 'salary': 72000, 'companyName': 'FinanceFirst', + 'industry': 'Finance' + }, + { + 'firstName': 'Noah', 'lastName': 'Miller', 'gradYear': '2025', 'major': 'Computer Science', + 'college': 'Khoury College of Computer Sciences', 'gpa': 3.4, 'status': 'Accepted', + 'positionTitle': 'Full Stack Developer', 'salary': 80000, 'companyName': 'WebSolutions', + 'industry': 'Technology' + } + ] + + try: + response = requests.get(f"{API_BASE_URL}/advisors/{advisor_id}/analytics/placement-data") + logger.info(f"Fetching placement analytics from API: status_code={response.status_code}") + if response.status_code == 200: + api_data = response.json() + logger.info(f"Received placement data: {len(api_data) if api_data else 0} records") + if api_data: # If API returns data, use it + logger.info(f"Successfully fetched {len(api_data)} placement records from API") + return api_data + else: # If API returns empty array, use 
fallback data + logger.info("API returned empty data, using fallback sample data") + return fallback_data + else: + # API returned error status, use fallback data + logger.warning(f"API returned status {response.status_code}, using fallback sample data") + return fallback_data + except Exception as e: + logger.error(f"Error fetching placement analytics: {e}") + logger.info("Using fallback sample data due to API error") + return fallback_data + +# Fetch data +advisor_data = fetch_advisor_data(advisor_user_id) +placement_data = fetch_placement_analytics(advisor_user_id) + + + +if advisor_data: + # Header + st.title("📊 Student Analytics Dashboard") + st.subheader(f"Welcome back, {advisor_data['firstName']}!") + + if placement_data: + # Convert to DataFrame for easier manipulation + df = pd.DataFrame(placement_data) + + # Create two-column layout: sidebar (25%) and main content (75%) + sidebar_col, main_col = st.columns([1, 3]) + + with sidebar_col: + st.markdown("### 🔍 Filter Controls") + + # Extract unique values for filters + unique_grad_years = sorted(df['gradYear'].unique().tolist()) + unique_colleges = sorted(df['college'].unique().tolist()) + unique_majors = sorted(df['major'].unique().tolist()) + unique_industries = sorted(df['industry'].unique().tolist()) + + # Filter controls with "All" option + grad_year_options = ["All"] + unique_grad_years + selected_grad_years = st.multiselect( + "Graduation Year", + options=grad_year_options, + default=["All"], + help="Select graduation years to include in analysis", + key="grad_years_filter" + ) + + college_options = ["All"] + unique_colleges + selected_colleges = st.multiselect( + "Department/College", + options=college_options, + default=["All"], + help="Select colleges/departments to include", + key="colleges_filter" + ) + + major_options = ["All"] + unique_majors + selected_majors = st.multiselect( + "Major", + options=major_options, + default=["All"], + help="Select student majors to include", + key="majors_filter" 
+ ) + + industry_options = ["All"] + unique_industries + selected_industries = st.multiselect( + "Industry", + options=industry_options, + default=["All"], + help="Select job industries to include", + key="industries_filter" + ) + + gpa_range = st.slider( + "GPA Range", + min_value=0.0, + max_value=4.0, + value=(0.0, 4.0), + step=0.1, + help="Select GPA range for filtering students", + key="gpa_range_filter" + ) + + with main_col: + # Apply filters to data with real-time updates + # Handle "All" option for each filter + if "All" in selected_grad_years: + grad_year_filter = unique_grad_years + else: + grad_year_filter = selected_grad_years + + if "All" in selected_colleges: + college_filter = unique_colleges + else: + college_filter = selected_colleges + + if "All" in selected_majors: + major_filter = unique_majors + else: + major_filter = selected_majors + + if "All" in selected_industries: + industry_filter = unique_industries + else: + industry_filter = selected_industries + + # Apply filters to DataFrame + filtered_df = df[ + (df['gradYear'].isin(grad_year_filter)) & + (df['college'].isin(college_filter)) & + (df['major'].isin(major_filter)) & + (df['industry'].isin(industry_filter)) & + (df['gpa'] >= gpa_range[0]) & + (df['gpa'] <= gpa_range[1]) + ] + + if not filtered_df.empty: + # Summary statistics + total_records = len(filtered_df) + accepted_applications = len(filtered_df[filtered_df['status'] == 'Accepted']) + completed_coops = len(filtered_df[filtered_df['status'] == 'Completed']) + success_rate = ((accepted_applications + completed_coops) / total_records) * 100 if total_records > 0 else 0 + avg_hourly_successful = filtered_df[filtered_df['status'].isin(['Accepted', 'Completed'])]['salary'].mean() + + # Display summary statistics + stat_col1, stat_col2, stat_col3 = st.columns(3) + with stat_col1: + st.metric("Total Records", total_records) + with stat_col2: + st.metric("Success Rate", f"{success_rate:.1f}%") + with stat_col3: + if not 
pd.isna(avg_hourly_successful): + st.metric("Avg Hourly Pay (Successful)", f"${avg_hourly_successful:.2f}") + else: + st.metric("Avg Hourly Pay (Successful)", "N/A") + + st.markdown("---") + + # Create interactive scatterplot + fig = go.Figure() + + # Add successful experiences (green dots) - both Accepted applications and Completed co-ops + successful_data = filtered_df[filtered_df['status'].isin(['Accepted', 'Completed'])] + if not successful_data.empty: + fig.add_trace(go.Scatter( + x=successful_data['gpa'], + y=successful_data['salary'], + mode='markers', + marker=dict(color='green', size=10, opacity=0.7), + name='Accepted Apps & Completed Co-ops', + hovertemplate='%{customdata[0]} %{customdata[1]}
' + + 'GPA: %{x:.2f}
' + + 'Position: %{customdata[2]}
' + + 'Company: %{customdata[3]}
' + + 'Hourly Pay: $%{y:.2f}
' + + 'Status: %{customdata[4]}', + customdata=successful_data[['firstName', 'lastName', 'positionTitle', 'companyName', 'status']].values + )) + + # Add rejected applications (red dots) + rejected_data = filtered_df[filtered_df['status'] == 'Rejected'] + if not rejected_data.empty: + fig.add_trace(go.Scatter( + x=rejected_data['gpa'], + y=rejected_data['salary'], + mode='markers', + marker=dict(color='red', size=10, opacity=0.7), + name='Rejected Applications', + hovertemplate='%{customdata[0]} %{customdata[1]}
' + + 'GPA: %{x:.2f}
' + + 'Position: %{customdata[2]}
' + + 'Company: %{customdata[3]}
' + + 'Hourly Pay: $%{y:.2f}
' + + 'Status: %{customdata[4]}', + customdata=rejected_data[['firstName', 'lastName', 'positionTitle', 'companyName', 'status']].values + )) + + # Update layout + fig.update_layout( + title="Student Placement Analytics: GPA vs Hourly Pay", + xaxis_title="Student GPA", + yaxis_title="Hourly Pay (USD)", + yaxis=dict(tickformat='$,.2f'), + xaxis=dict(range=[0, 4.0]), + height=600, + showlegend=True, + legend=dict( + orientation="h", + yanchor="bottom", + y=1.02, + xanchor="right", + x=1 + ) + ) + + # Display the plot + st.plotly_chart(fig, use_container_width=True) + + # Legend explanation + st.markdown(""" + **Legend:** 🟢 Green = Accepted Applications & Completed Co-ops | 🔴 Red = Rejected Applications + + **How to use:** Hover over data points to see detailed information about each application or completed co-op. + Use the filter controls on the left to focus on specific student groups or criteria. + """) + + else: + st.warning("No data matches the selected filters. Please adjust your filter criteria.") + else: + st.info("No placement data available for analysis.") + +else: + st.error("Unable to load advisor data. 
Please try again later.") + st.info("If this problem persists, please contact the system administrator.") \ No newline at end of file diff --git a/app/src/pages/11_Prediction.py b/app/src/pages/11_Prediction.py deleted file mode 100644 index a5a322a2f4..0000000000 --- a/app/src/pages/11_Prediction.py +++ /dev/null @@ -1,38 +0,0 @@ -import logging -logger = logging.getLogger(__name__) - -import streamlit as st -from modules.nav import SideBarLinks -import requests - -st.set_page_config(layout = 'wide') - -# Display the appropriate sidebar links for the role of the logged in user -SideBarLinks() - -st.title('Prediction with Regression') - -# create a 2 column layout -col1, col2 = st.columns(2) - -# add one number input for variable 1 into column 1 -with col1: - var_01 = st.number_input('Variable 01:', - step=1) - -# add another number input for variable 2 into column 2 -with col2: - var_02 = st.number_input('Variable 02:', - step=1) - -logger.info(f'var_01 = {var_01}') -logger.info(f'var_02 = {var_02}') - -# add a button to use the values entered into the number field to send to the -# prediction function via the REST API -if st.button('Calculate Prediction', - type='primary', - use_container_width=True): - results = requests.get(f'http://api:4000/c/prediction/{var_01}/{var_02}').json() - st.dataframe(results) - \ No newline at end of file diff --git a/app/src/pages/12_Advisor_Companies.py b/app/src/pages/12_Advisor_Companies.py new file mode 100644 index 0000000000..e8720e1f97 --- /dev/null +++ b/app/src/pages/12_Advisor_Companies.py @@ -0,0 +1,181 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout='wide') + +SideBarLinks() + +# API endpoint configuration +API_BASE_URL = "http://web-api:4000" +RATING_ENDPOINT = f"{API_BASE_URL}/workedatpos/company-ratings" +ALL_COMPANIES_ENDPOINT = f"{API_BASE_URL}/companyProfiles" + +st.title('Company Partnerships') + 
def _fetch_json(endpoint, what):
    """GET `endpoint` and return the parsed JSON list.

    Shared helper for the two company endpoints below (they were
    copy-pasted duplicates). On a non-200 response or any error the
    failure is shown via st.error and [] is returned; `what` names the
    data kind in the error message ("rating" or "company").
    """
    try:
        response = requests.get(endpoint, timeout=10)
        if response.status_code == 200:
            return response.json()
        else:
            st.error(f"Failed to fetch {what} data: {response.status_code}")
            st.error(f"Response: {response.text}")
            return []
    except requests.exceptions.RequestException as e:
        st.error(f"Error connecting to API: {str(e)}")
        return []
    except Exception as e:
        st.error(f"Unexpected error: {str(e)}")
        return []

def fetch_company_ratings():
    """Fetch company profiles sorted by rating from the API"""
    return _fetch_json(RATING_ENDPOINT, "rating")

def fetch_all_companies():
    """Fetch all company profiles from the API"""
    return _fetch_json(ALL_COMPANIES_ENDPOINT, "company")
key=lambda x: float(x.get('avgRating', 0)) if x.get('avgRating') is not None else 0) + else: + avg_rating = 0 + top_company = rated_companies[0] if rated_companies else None + + col1, col2, col3 = st.columns(3) + with col1: + st.metric("Companies with Ratings", total_rated) + with col2: + st.metric("Overall Average Rating", f"{avg_rating:.1f}/5.0") + with col3: + company_name = top_company.get('companyName', 'N/A') if top_company else 'N/A' + st.metric("Top Rated Company", company_name) + + st.divider() + + # Add filtering options + if rated_companies: + # Since workedatpos endpoint doesn't have industry, we'll skip industry filtering + filtered_companies = rated_companies + else: + filtered_companies = [] + + # Display companies with ratings + if filtered_companies: + st.subheader("🏆 Highest Performing Companies") + st.markdown("*Sorted by average rating by past coops (highest to lowest)*") + + # Create a DataFrame-like display using Streamlit + col1, col2, col3, col4, col5, col6, col7 = st.columns([1, 3, 2, 1, 1, 1, 1]) + + with col1: + st.write("**Company ID**") + with col2: + st.write("**Company Name**") + with col3: + st.write("**Industry**") + with col4: + st.write("**Avg Rating**") + with col5: + st.write("**# of Ratings**") + with col6: + st.write("**Min**") + with col7: + st.write("**Max**") + + st.divider() + + for company in filtered_companies: + col1, col2, col3, col4, col5, col6, col7 = st.columns([1, 3, 2, 1, 1, 1, 1]) + + with col1: + st.write(company.get('companyProfileId', 'N/A')) + with col2: + st.write(f"**{company.get('companyName', 'N/A')}**") + with col3: + st.write(company.get('companyIndustry', 'N/A')) + with col4: + avg_rating = company.get('avgRating', 0) + if avg_rating is not None: + try: + avg_rating = float(avg_rating) + # Display rating with color coding + if avg_rating >= 4.0: + st.success(f"{avg_rating:.1f}/5.0 ⭐") + elif avg_rating >= 3.0: + st.info(f"{avg_rating:.1f}/5.0") + else: + st.warning(f"{avg_rating:.1f}/5.0") + except 
(ValueError, TypeError): + st.write("Invalid rating") + else: + st.write("No ratings") + with col5: + total_ratings = company.get('totalRatings', 0) + st.write(f"{total_ratings}") + with col6: + min_rating = company.get('minRating', 'N/A') + if min_rating is not None: + st.write(f"{min_rating:.1f}") + else: + st.write("N/A") + with col7: + max_rating = company.get('maxRating', 'N/A') + if max_rating is not None: + st.write(f"{max_rating:.1f}") + else: + st.write("N/A") + + st.divider() + +# Main content +try: + display_company_ratings() +except Exception as e: + st.error(f"An error occurred: {str(e)}") + logger.error(f"Error in display_company_ratings: {str(e)}") + + diff --git a/app/src/pages/13_Advisor_StudentManagement.py b/app/src/pages/13_Advisor_StudentManagement.py new file mode 100644 index 0000000000..527709e6cc --- /dev/null +++ b/app/src/pages/13_Advisor_StudentManagement.py @@ -0,0 +1,297 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout='wide') + +SideBarLinks() + +st.header("👥 Student Management") + +logger.info("Loading Advisor Student Management page") + +# API configuration +API_BASE_URL = "http://web-api:4000" + +# Get the user_id from session state +advisor_user_id = st.session_state.get("user_id", None) + +if advisor_user_id is None: + st.error("User not logged in. 
if advisor_user_id is None:
    st.error("User not logged in. Please return to home and log in.")
    st.stop()

# Function to fetch advisor data from API
def fetch_advisor_data(user_id):
    """Return the advisor's user record (first row), or demo fallback data
    when the API is unreachable."""
    try:
        response = requests.get(f"{API_BASE_URL}/users/{user_id}")
        logger.info(f"Fetching advisor data from API: status_code={response.status_code}")
        if response.status_code == 200:
            data = response.json()
            return data[0] if data else None
        return None
    except Exception as e:
        logger.error(f"Error fetching advisor data: {e}")
        # Fallback data if API is not available
        return {
            'userId': 31,
            'firstName': 'Sarah',
            'lastName': 'Martinez',
            'email': 's.martinez@neu.edu',
            'phone': '555-0301',
            'college': 'NEU',
            'industry': 'Academic',
            'gender': 'Female',
            'race': 'Hispanic',
            'nationality': 'American',
            'sexuality': 'Heterosexual',
            'disability': None
        }

# Function to fetch advisor's assigned students from API
def fetch_advisor_students(advisor_id):
    """Return the advisor's assigned students, or demo fallback data when the
    API is unreachable."""
    try:
        response = requests.get(f"{API_BASE_URL}/advisors/{advisor_id}/students")
        logger.info(f"Fetching advisor students from API: status_code={response.status_code}")
        if response.status_code == 200:
            return response.json()
        return []
    except Exception as e:
        logger.error(f"Error fetching advisor students: {e}")
        # Fallback data if API is not available
        return [
            {
                'userId': 1,
                'firstName': 'Charlie',
                'lastName': 'Stout',
                'email': 'c.stout@student.edu',
                'phone': '555-0101',
                'major': 'Computer Science',
                'minor': 'Mathematics',
                'college': 'Khoury College of Computer Sciences',
                'gradYear': '2026',
                'grade': 'Junior'
            },
            {
                'userId': 2,
                'firstName': 'Liam',
                'lastName': 'Williams',
                'email': 'l.williams@student.edu',
                'phone': '555-0102',
                'major': 'Business',
                'minor': 'Economics',
                'college': 'D\'Amore-McKim School of Business',
                'gradYear': '2025',
                'grade': 'Senior'
            },
            {
                'userId': 3,
                'firstName': 'Sophia',
                'lastName': 'Brown',
                'email': 's.brown@student.edu',
                'phone': '555-0103',
                'major': 'Mechanical Engineering',
                'minor': 'Physics',
                'college': 'College of Engineering',
                'gradYear': '2027',
                'grade': 'Sophomore'
            }
        ]

# Function to fetch student application statistics
def fetch_student_application_stats(student_id):
    """Return per-status application counts for one student ([] on failure)."""
    try:
        response = requests.get(f"{API_BASE_URL}/student/{student_id}/applications/summary")
        logger.info(f"Fetching student application stats from API: status_code={response.status_code}")
        if response.status_code == 200:
            data = response.json()
            logger.info(f"Student application stats received: {data}")
            return data
        logger.warning(f"Failed to fetch student application stats, status code: {response.status_code}")
        return []
    except Exception as e:
        logger.error(f"Error fetching student application stats: {e}")
        return []

# Function to update advisor data
def update_advisor_data(advisor_id, advisor_data):
    """PUT the updated advisor profile; True on HTTP 200."""
    try:
        response = requests.put(f"{API_BASE_URL}/advisors/{advisor_id}/profile", json=advisor_data)
        logger.info(f"Updating advisor profile: status_code={response.status_code}")
        return response.status_code == 200
    except Exception as e:
        logger.error(f"Error updating advisor data: {e}")
        return False

# Function to update student flag status
def update_student_flag(advisor_id, student_id, flagged):
    """PUT the student's flagged status; True on HTTP 200."""
    try:
        response = requests.put(f"{API_BASE_URL}/advisors/{advisor_id}/students/{student_id}/flag",
                                json={"flagged": flagged})
        logger.info(f"Updating student flag: status_code={response.status_code}")
        return response.status_code == 200
    except Exception as e:
        logger.error(f"Error updating student flag: {e}")
        return False

def _render_student_card(student, is_flagged):
    """Render one advisee card: contact info, academics, application metrics
    and the flag/unflag toggle.

    Extracted because the original repeated this entire layout verbatim for
    the flagged and unflagged branches.
    """
    student_id = student.get('userId')
    col1, col2, col3, col4 = st.columns([2, 2, 1.5, 0.5])

    with col1:
        marker = "🚩 " if is_flagged else ""
        st.markdown(f"**{marker}{student.get('firstName', '')} {student.get('lastName', '')}**")
        st.write(f"📧 {student.get('email', '')}")
        st.write(f"📱 {student.get('phone', '')}")

    with col2:
        st.write(f"🎓 **Major:** {student.get('major', '')}")
        if student.get('minor'):
            st.write(f"📚 **Minor:** {student.get('minor', '')}")
        st.write(f"🏫 **College:** {student.get('college', '')}")
        st.write(f"📅 **Graduation:** {student.get('gradYear', '')} ({student.get('grade', '')})")

    with col3:
        # Fetch detailed application stats for this student
        app_stats = fetch_student_application_stats(student_id)
        status_counts = {item.get('status', ''): item.get('ApplicationCount', 0) for item in app_stats} if app_stats else {}

        # Display detailed metrics in a compact layout
        st.markdown("**Application Status:**")
        metric_col1, metric_col2, metric_col3 = st.columns(3)
        with metric_col1:
            st.metric("📋 Review", status_counts.get('Under Review', 0))
        with metric_col2:
            st.metric("📤 Submit", status_counts.get('Submitted', 0))
        with metric_col3:
            st.metric("❌ Reject", status_counts.get('Rejected', 0))

    with col4:
        # Flag toggle
        if is_flagged:
            if st.button("🚩 Unflag", key=f"unflag_{student_id}", use_container_width=True):
                if update_student_flag(advisor_user_id, student_id, False):
                    st.success("Student unflagged!")
                    st.rerun()
                else:
                    st.error("Failed to unflag student")
        else:
            if st.button("🏳️ Flag", key=f"flag_{student_id}", use_container_width=True):
                if update_student_flag(advisor_user_id, student_id, True):
                    st.success("Student flagged!")
                    st.rerun()
                else:
                    st.error("Failed to flag student")

# Fetch data
advisor_data = fetch_advisor_data(advisor_user_id)
advisor_students = fetch_advisor_students(advisor_user_id)

# Display advisor information
if advisor_data:
    st.subheader("👨‍🏫 Your Profile")
    col1, col2 = st.columns(2)

    with col1:
        st.write(f"**Name:** {advisor_data.get('firstName', '')} {advisor_data.get('lastName', '')}")
        st.write(f"**Email:** {advisor_data.get('email', '')}")
        st.write(f"**Phone:** {advisor_data.get('phone', '')}")

    with col2:
        st.write(f"**College:** {advisor_data.get('college', '')}")
        st.write(f"**Industry:** {advisor_data.get('industry', '')}")
        st.write(f"**Gender:** {advisor_data.get('gender', '')}")

st.markdown("---")

# Display students
if advisor_students:
    st.subheader(f"Your Advisees ({len(advisor_students)} students)")

    # Search and filter functionality
    search_col1, search_col2 = st.columns([2, 1])

    with search_col1:
        search_term = st.text_input("🔍 Search students by name or major", placeholder="Enter student name or major...")

    with search_col2:
        grad_years = sorted(list(set([student.get('gradYear', '') for student in advisor_students if student.get('gradYear')])))
        selected_year = st.selectbox("Filter by Graduation Year", ["All"] + grad_years)

    # Filter students based on search and year
    filtered_students = advisor_students

    if search_term:
        needle = search_term.lower()
        filtered_students = [
            student for student in filtered_students
            if needle in f"{student.get('firstName', '')} {student.get('lastName', '')}".lower()
            or needle in student.get('major', '').lower()
        ]

    if selected_year != "All":
        filtered_students = [
            student for student in filtered_students
            if student.get('gradYear') == selected_year
        ]

    st.markdown("---")

    # Display students in cards
    for student in filtered_students:
        is_flagged = student.get('flagged', False)
        with st.container():
            if is_flagged:
                # NOTE(review): the original emitted an unsafe-HTML markdown
                # wrapper here whose markup is empty in this view — presumably
                # a styling <div> lost upstream; confirm the intended HTML.
                st.markdown("""
""", unsafe_allow_html=True)
            _render_student_card(student, is_flagged)
            if is_flagged:
                # Closing wrapper for the (empty) unsafe-HTML block above.
                st.markdown("", unsafe_allow_html=True)
            else:
                st.markdown("---")
else:
    st.info("No students assigned to you at this time.")
import logging
logger = logging.getLogger(__name__)

import streamlit as st
from modules.nav import SideBarLinks
import requests

st.set_page_config(layout='wide')

SideBarLinks()

st.title('Employer Home Page')

# API endpoint configuration
API_BASE_URL = "http://web-api:4000"

# Hard-coded demo identity: Phoebe Hwang's userId and her company's profile id.
# TODO: pull these from st.session_state once login is wired up. The original
# `if ... is None` guards were unreachable against these constants and have
# been removed.
phoebe_user_id = 37
company_profile_id = 1

# Function to fetch user data from API
def fetch_user_data(phoebe_user_id):
    """Return the user's record (first row), or demo fallback data on error."""
    try:
        response = requests.get(f"{API_BASE_URL}/users/{phoebe_user_id}")
        if response.status_code == 200:
            data = response.json()
            return data[0] if data else None
        return None
    except Exception as e:
        logger.error(f"Error fetching user data: {e}")
        # Fallback data if API is not available
        return {
            'userId': 37,
            'firstName': 'Phoebe',
            'lastName': 'Hwang',
            'email': 'p.hwang@technova.com',
            'phone': '555-0401',
            'companyProfileId': '1',
            'industry': 'Technology'
        }

# Function to fetch company data from API
def fetch_company_data(company_profile_id):
    """Return the company profile (first row), or demo fallback data on error."""
    try:
        response = requests.get(f"{API_BASE_URL}/users/{company_profile_id}/companyProfiles")
        if response.status_code == 200:
            data = response.json()
            return data[0] if data else None
        return None
    except Exception as e:
        logger.error(f"Error fetching company data: {e}")
        # Fallback data if API is not available
        return {
            'companyProfileId': 1,
            'name': 'TechNova Inc',
            'bio': 'Leading software development company specializing in enterprise solutions and cloud infrastructure.',
            'industry': 'Technology',
            'websiteLink': 'www.technova.com'
        }

# Function to update user data via API
def update_user_data(user_data):
    """PUT the updated personal profile; True on HTTP 200."""
    try:
        response = requests.put(f"{API_BASE_URL}/users", json=user_data)
        return response.status_code == 200
    except Exception as e:
        logger.error(f"Error updating user data: {e}")
        return False

# Function to update company data via API
def update_company_data(company_data):
    """PUT the updated company profile (routes on the module-level
    company_profile_id); True on HTTP 200."""
    try:
        response = requests.put(f"{API_BASE_URL}/users/{company_profile_id}/companyProfiles/update", json=company_data)
        return response.status_code == 200
    except Exception as e:
        logger.error(f"Error updating company data: {e}")
        return False

user_data = fetch_user_data(phoebe_user_id)
company_data = fetch_company_data(company_profile_id)

if user_data:
    # Header
    st.subheader(f"Welcome back, {user_data['firstName']}!")

    # Company Information Form
    with st.form("company_form"):
        st.subheader("Company Information")
        col1 = st.columns(1)[0]

        with col1:
            # company_data may be None when both the API and fallback fail.
            company_name = st.text_input("Company Name", value=company_data.get("name", "") if company_data else "")
            bio = st.text_input("Who Are We", value=company_data.get("bio", "") if company_data else "")
            industry = st.text_input("Industry", value=company_data.get("industry", "") if company_data else "")
            website_link = st.text_input("Website", value=company_data.get("websiteLink", "") if company_data else "")

        company_submitted = st.form_submit_button("Update Company Profile", type="primary", use_container_width=True)

        if company_submitted:
            company_update_data = {
                "name": company_name,
                "bio": bio,
                "industry": industry,
                "websiteLink": website_link
            }

            if update_company_data(company_update_data):
                st.success("✅ Profile updated successfully!")
                st.rerun()
            else:
                st.error("❌ Failed to update profile")

    # Personal Information Form (separate form)
    with st.form("personal_form"):
        st.subheader("Personal Information")
        personal_col1 = st.columns(1)[0]

        with personal_col1:
            first_name = st.text_input("First Name", value=user_data.get("firstName", ""))
            last_name = st.text_input("Last Name", value=user_data.get("lastName", ""))
            email = st.text_input("Email", value=user_data.get("email", ""))
            phone = st.text_input("Phone", value=user_data.get("phone", ""))

        personal_submitted = st.form_submit_button("Update Personal Profile", type="primary", use_container_width=True)

        if personal_submitted:
            personal_update_data = {
                "userId": phoebe_user_id,
                "firstName": first_name,
                "lastName": last_name,
                "email": email,
                "phone": phone
            }

            if update_user_data(personal_update_data):
                st.success("✅ Personal profile updated successfully!")
                st.rerun()
            else:
                st.error("❌ Failed to update personal profile")
Please try again later.") \ No newline at end of file diff --git a/app/src/pages/21_Employer_Postings.py b/app/src/pages/21_Employer_Postings.py new file mode 100644 index 0000000000..bdbcf47a0a --- /dev/null +++ b/app/src/pages/21_Employer_Postings.py @@ -0,0 +1,261 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests +from datetime import datetime, date + +st.set_page_config(layout='wide') + +SideBarLinks() + +st.title('🆕 Create Co-op Posting') + +logger.info("Loading Create Co-op Posting page") + +# API configuration +API_BASE_URL = "http://web-api:4000" + +# Get the user_id from session state (use real session state in production) +employer_user_id = 37 #st.session_state.get("user_id", 37) # Default to 37 for demo + +if employer_user_id is None: + st.error("User not logged in. Please return to home and log in.") + st.stop() + +# Function to fetch available skills +def fetch_skills(): + try: + response = requests.get(f"{API_BASE_URL}/skills") + logger.info(f"Fetching skills: status_code={response.status_code}") + + if response.status_code == 200: + data = response.json() + logger.info(f"Skills data received: {len(data)} skills") + return data + else: + logger.warning(f"Failed to fetch skills, status code: {response.status_code}") + return [] + except Exception as e: + logger.error(f"Error fetching skills: {e}") + return [] + +# Function to get next available co-op position ID +def get_next_coop_position_id(): + try: + response = requests.get(f"{API_BASE_URL}/coopPositions") + if response.status_code == 200: + positions = response.json() + if positions: + max_id = max([pos.get('coopPositionId', 0) for pos in positions]) + return max_id + 1 + else: + return 1 + else: + return 1 + except Exception as e: + logger.error(f"Error getting next position ID: {e}") + return 1 + +# Function to create co-op position +def create_coop_position(position_data): + try: + response = 
requests.post(f"{API_BASE_URL}/coopPositions", json=position_data) + logger.info(f"Creating co-op position: status_code={response.status_code}") + return response.status_code in (200, 201) + except Exception as e: + logger.error(f"Error creating co-op position: {e}") + return False + +# Function to link employer to position +def link_employer_to_position(employer_id, position_id): + try: + link_data = { + "employerId": employer_id, + "coopPositionId": position_id + } + response = requests.post(f"{API_BASE_URL}/createsPos", json=link_data) + logger.info(f"Linking employer to position: status_code={response.status_code}") + return response.status_code in (200, 201) + except Exception as e: + logger.error(f"Error linking employer to position: {e}") + return False + +# Function to fetch user data to get company info +def fetch_user_data(user_id): + try: + response = requests.get(f"{API_BASE_URL}/users/{user_id}") + if response.status_code == 200: + data = response.json() + return data[0] if data else None + return None + except Exception as e: + logger.error(f"Error fetching user data: {e}") + return None + +# Fetch user data and skills +user_data = fetch_user_data(employer_user_id) +available_skills = fetch_skills() + +if not user_data: + st.error("Unable to load user data. 
Please try again later.") + st.stop() + +# Header +st.subheader(f"👋 Hello, {user_data['firstName']} {user_data['lastName']}!") +st.info("Create a new co-op position for your company.") + +# Create the form +with st.form("create_coop_form"): + st.subheader("📋 Position Details") + + # Basic position information + col1, col2 = st.columns(2) + + with col1: + title = st.text_input("Position Title*") + location = st.text_input("Location*") + hourly_pay = st.number_input("Hourly Pay ($)*", min_value=0.0, value=20.0, step=0.50, format="%.2f") + industry = st.selectbox("Industry*", [ + "Technology", "Finance", "Healthcare", "Manufacturing", + "Consulting", "Education", "Marketing", "Engineering", + "Biotechnology", "Non-profit", "Other" + ], index=0) + + with col2: + start_date = st.date_input("Start Date*") + end_date = st.date_input("End Date*") + deadline = st.date_input("Application Deadline*") + desired_gpa = st.number_input("Minimum GPA", min_value=0.0, max_value=4.0, value=3.0, step=0.1, format="%.1f") + + # Description + st.subheader("📝 Position Description") + description = st.text_area( + "Job Description*", + placeholder="Describe what students can expect in this position", + height=150 + ) + + # Skills section + st.subheader("🛠️ Skills Requirements") + + skill_options = [] + skill_ids = {} + + if available_skills: + for skill in available_skills: + skill_display = f"{skill['name']} ({skill['category']})" + skill_options.append(skill_display) + skill_ids[skill_display] = skill['skillId'] + + col1, col2 = st.columns(2) + + with col1: + st.write("**Required Skills** (Must have)") + required_skills = st.multiselect( + "Select required skills", + options=skill_options, + help="Students must have these skills to be eligible" + ) + + with col2: + st.write("**Desired Skills** (Nice to have)") + desired_skills = st.multiselect( + "Select desired skills", + options=skill_options, + help="Preferred skills that would be beneficial" + ) + + if required_skills or 
desired_skills: + st.info(f"📋 **Selected:** {len(required_skills)} required, {len(desired_skills)} desired skills") + + # Submit button + submitted = st.form_submit_button("🚀 Create Co-op Position", type="primary", use_container_width=True) + + if submitted: + # Validation + errors = [] + + if not title.strip(): + errors.append("Position title is required") + if not location.strip(): + errors.append("Location is required") + if not description.strip(): + errors.append("Job description is required") + if hourly_pay <= 0: + errors.append("Hourly pay must be greater than 0") + if start_date >= end_date: + errors.append("End date must be after start date") + if deadline >= start_date: + errors.append("Application deadline must be before start date") + + if errors: + for error in errors: + st.error(f"❌ {error}") + else: + # Get next position ID + next_position_id = get_next_coop_position_id() + + # Prepare position data + position_data = { + "coopPositionId": next_position_id, + "title": title.strip(), + "location": location.strip(), + "description": description.strip(), + "hourlyPay": float(hourly_pay), + "requiredSkillsId": skill_ids.get(required_skills[0]) if required_skills else None, + "desiredSkillsId": skill_ids.get(desired_skills[0]) if desired_skills else None, + "desiredGPA": float(desired_gpa), + "deadline": f"{deadline} 23:59:59", + "startDate": str(start_date), + "endDate": str(end_date), + "flag": False, + "industry": industry + } + + # Create the position + if create_coop_position(position_data): + # Link employer to position + if link_employer_to_position(employer_user_id, next_position_id): + st.success("🎉 Co-op position created successfully!") + st.balloons() + + # Display summary + st.subheader("📊 Position Summary") + summary_col1, summary_col2 = st.columns(2) + + with summary_col1: + st.write(f"**Position ID:** {next_position_id}") + st.write(f"**Title:** {title}") + st.write(f"**Location:** {location}") + st.write(f"**Industry:** {industry}") + 
st.write(f"**Hourly Pay:** ${hourly_pay:.2f}") + + # And update the summary display section: + with summary_col2: + st.write(f"**Start Date:** {start_date}") + st.write(f"**End Date:** {end_date}") + st.write(f"**Application Deadline:** {deadline}") + st.write(f"**Minimum GPA:** {desired_gpa}") + if required_skills: + if len(required_skills) == 1: + st.write(f"**Required Skill:** {required_skills[0]}") + else: + st.write(f"**Required Skills:** {required_skills[0]} (+{len(required_skills)-1} more)") + if desired_skills: + if len(desired_skills) == 1: + st.write(f"**Desired Skill:** {desired_skills[0]}") + else: + st.write(f"**Desired Skills:** {desired_skills[0]} (+{len(desired_skills)-1} more)") + + st.info("💡 Students can now view and apply to this position!") + + # Option to create another position + if st.button("➕ Create Another Position"): + st.rerun() + + else: + st.error("❌ Position created but failed to link to employer. Please contact support.") + else: + st.error("❌ Failed to create co-op position. 
Please try again.") diff --git a/app/src/pages/22_Employer_Applications.py b/app/src/pages/22_Employer_Applications.py new file mode 100644 index 0000000000..4150f7a54c --- /dev/null +++ b/app/src/pages/22_Employer_Applications.py @@ -0,0 +1,12 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout = 'wide') + +SideBarLinks() + +st.title('Applications') \ No newline at end of file diff --git a/app/src/pages/23_Employer_Candidates.py b/app/src/pages/23_Employer_Candidates.py new file mode 100644 index 0000000000..971ab23f84 --- /dev/null +++ b/app/src/pages/23_Employer_Candidates.py @@ -0,0 +1,12 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout = 'wide') + +SideBarLinks() + +st.title('Applicants') \ No newline at end of file diff --git a/app/src/pages/24_Employer_Profile.py b/app/src/pages/24_Employer_Profile.py new file mode 100644 index 0000000000..72b06b74ad --- /dev/null +++ b/app/src/pages/24_Employer_Profile.py @@ -0,0 +1,12 @@ +import logging +logger = logging.getLogger(__name__) + +import streamlit as st +from modules.nav import SideBarLinks +import requests + +st.set_page_config(layout = 'wide') + +SideBarLinks() + +st.title('Employer Profile') \ No newline at end of file diff --git a/app/src/pages/30_Admin_Home.py b/app/src/pages/30_Admin_Home.py new file mode 100644 index 0000000000..2a50189816 --- /dev/null +++ b/app/src/pages/30_Admin_Home.py @@ -0,0 +1,78 @@ +import os, requests, pandas as pd, streamlit as st +from modules.nav import SideBarLinks + +# ----- Config ----- +BASE_API = os.getenv("BASE_API", "http://web-api:4000") +COOP_API = f"{BASE_API}/coopPositions" +DEI_API = f"{BASE_API}/api/dei" # ok if not registered; handled below +TIMEOUT = 10 + +st.set_page_config(page_title="Admin • Coopalytics", layout="wide", 
st.set_page_config(page_title="Admin • Coopalytics", layout="wide",
                   initial_sidebar_state="expanded")
SideBarLinks()
st.title("⚙️ System Admin Home Page")

# ----- Helpers -----
def get_json(url):
    """GET `url`, raise for HTTP errors, and return the decoded JSON body."""
    response = requests.get(url, timeout=TIMEOUT)
    response.raise_for_status()
    return response.json()

def _load_frame(url):
    """Fetch a JSON list into a DataFrame; empty frame if the endpoint fails."""
    try:
        return pd.DataFrame(get_json(url))
    except Exception:
        return pd.DataFrame()

# ----- Load data (safe fallbacks) -----
pending_df = _load_frame(f"{COOP_API}/pending")
employers_df = _load_frame(f"{COOP_API}/employerJobCounts")
dei_gender = _load_frame(f"{DEI_API}/representation/gender")

# ----- KPIs -----
if employers_df.empty:
    active_employers = 0
    total_jobs = 0
else:
    active_employers = employers_df.loc[employers_df["numJobs"] > 0, "employerId"].nunique()
    total_jobs = int(employers_df["numJobs"].sum())

c1, c2, c3, c4 = st.columns(4)
c1.metric("Pending Postings", 0 if pending_df.empty else len(pending_df))
c2.metric("Active Employers", active_employers)
c3.metric("Total Jobs Posted", total_jobs)
c4.metric("DEI Data (gender groups)", 0 if dei_gender.empty else len(dei_gender))

st.divider()

# ----- Quick links -----
st.subheader("🚀 Quick Links")
col1, col2, col3 = st.columns(3)
with col1:
    st.page_link("pages/32_Admin_Postings.py", label="Review Job Postings", icon="✅")
with col2:
    st.page_link("pages/31_Admin_Employers.py", label="Manage Employers", icon="🏢")
with col3:
    st.page_link("pages/33_Admin_DEI.py", label="DEI Metrics", icon="🌍")

st.divider()

# ----- Tables (preview) -----
left, right = st.columns(2, gap="large")

with left:
    st.subheader("📌 Pending (Top 10)")
    if pending_df.empty:
        st.info("No pending positions.")
    else:
        pending_view = pending_df[["coopPositionId", "title", "companyName",
                                   "location", "deadline", "hourlyPay", "industry"]].copy()
        st.dataframe(pending_view.head(10), use_container_width=True)

with right:
    st.subheader("🏢 Employers by Job Count")
    if employers_df.empty:
        st.info("No employer data.")
    else:
        employers_view = employers_df[["employerId", "firstName", "lastName",
                                       "companyName", "numJobs"]].sort_values("numJobs", ascending=False)
        st.dataframe(employers_view.head(10), use_container_width=True)

# ---- (next file in diff: app/src/pages/31_Admin_Employers.py) ----

import os, requests, pandas as pd, streamlit as st
from modules.nav import SideBarLinks

# ---- Config ----
BASE_API = os.getenv("BASE_API", "http://web-api:4000")
COOP_API = f"{BASE_API}/coopPositions"
TIMEOUT = 10

st.set_page_config(page_title="Employer Accounts • Coopalytics", layout="wide")
SideBarLinks()
st.title("🏢 Employer Accounts")

# ---- Helpers ----
def get_json(url):
    """GET `url`, raise for HTTP errors, and return the decoded JSON body."""
    response = requests.get(url, timeout=TIMEOUT)
    response.raise_for_status()
    return response.json()

# ---- Load data ----
try:
    counts_df = pd.DataFrame(get_json(f"{COOP_API}/employerJobCounts"))
except Exception as e:
    counts_df = pd.DataFrame()
    st.error(f"Could not load employer job counts: {e}")

# ---- Top stats ----
if counts_df.empty:
    total_employers = 0
    active_employers = 0
else:
    total_employers = counts_df["employerId"].nunique()
    active_employers = counts_df[counts_df["numJobs"] > 0]["employerId"].nunique()
zero_job_employers = total_employers - active_employers

col1, col2, col3 = st.columns(3)
col1.metric("Total Employers", total_employers)
col2.metric("Active (≥1 job)", active_employers)
col3.metric("No Jobs Yet", zero_job_employers)

st.divider()
↓","numJobs ↑","lastName A→Z","company A→Z"], index=0) + +view = counts_df.copy() +if not view.empty: + # compose display name and filter + view["employerName"] = (view["firstName"].fillna("") + " " + view["lastName"].fillna("")).str.strip() + if query: + q = query.lower() + view = view[ + view["employerName"].str.lower().str.contains(q, na=False) | + view["companyName"].str.lower().str.contains(q, na=False) + ] + view = view[view["numJobs"] >= min_jobs] + + # sorting + if sort_by == "numJobs ↓": + view = view.sort_values(["numJobs","lastName","firstName"], ascending=[False,True,True]) + elif sort_by == "numJobs ↑": + view = view.sort_values(["numJobs","lastName","firstName"], ascending=[True,True,True]) + elif sort_by == "lastName A→Z": + view = view.sort_values(["lastName","firstName"]) + else: # company A→Z + view = view.sort_values(["companyName","lastName","firstName"]) + +st.subheader("Accounts") +if view.empty: + st.info("No employers match your filters.") +else: + show = view[["employerId","employerName","companyName","numJobs"]].rename( + columns={ + "employerId":"Employer ID", + "employerName":"Employer", + "companyName":"Company", + "numJobs":"# Jobs" + } + ) + st.dataframe(show, use_container_width=True) + +st.caption("Data source: /api/coopPositions/employerJobCounts") \ No newline at end of file diff --git a/app/src/pages/32_Admin_Postings.py b/app/src/pages/32_Admin_Postings.py new file mode 100644 index 0000000000..d7d5b3c3b2 --- /dev/null +++ b/app/src/pages/32_Admin_Postings.py @@ -0,0 +1,129 @@ +import os, requests, pandas as pd, streamlit as st +from modules.nav import SideBarLinks + +# ---- Config ---- +BASE_API = os.getenv("BASE_API", "http://web-api:4000") +COOP_API = f"{BASE_API}/coopPositions" +TIMEOUT = 10 + +st.set_page_config(page_title="Review Job Postings • Coopalytics", layout="wide", initial_sidebar_state="expanded") +SideBarLinks() +st.title("📝 Review Job Postings") + +# ---- Helpers ---- +def get_json(url): + r = requests.get(url, 
timeout=TIMEOUT); r.raise_for_status(); return r.json() + +def put_json(url, payload=None): + r = requests.put(url, json=payload or {}, timeout=TIMEOUT); r.raise_for_status(); return r.json() + +def delete_json(url): + r = requests.delete(url, timeout=TIMEOUT); r.raise_for_status(); return r.json() + +def flag_json(pos_id, value: int): + r = requests.put(f"{COOP_API}/{pos_id}/flag/{value}", timeout=TIMEOUT); r.raise_for_status(); return r.json() + +def unflag_json(pos_id): + r = requests.put(f"{COOP_API}/{pos_id}/unflag", timeout=TIMEOUT); r.raise_for_status(); return r.json() + +# ---- Load pending ---- +try: + pending = pd.DataFrame(get_json(f"{COOP_API}/pending")) +except Exception as e: + st.error(f"Could not load pending positions: {e}") + pending = pd.DataFrame() + +# ---- Top bar (metrics + filters) ---- +c1, c2, c3, c4 = st.columns([1,1,2,2]) +total_pending = 0 if pending.empty else len(pending) +c1.metric("Pending", total_pending) +try: + avg_pay = pd.DataFrame(get_json(f"{COOP_API}/industryAveragePay")) + c2.metric("Industries", 0 if avg_pay.empty else len(avg_pay)) +except Exception: + c2.metric("Industries", "—") + +# Filters +q = c3.text_input("Search title/company/location", "") +industry_filter = c4.selectbox( + "Industry filter", + ["All"] + (sorted(pending["industry"].dropna().unique().tolist()) if not pending.empty else ["All"]), + index=0 +) + +# Apply filters +view = pending.copy() +if not view.empty: + view["companyName"] = view.get("companyName", "") + if q: + ql = q.lower() + view = view[ + view["title"].str.lower().str.contains(ql, na=False) | + view["companyName"].astype(str).str.lower().str.contains(ql, na=False) | + view["location"].str.lower().str.contains(ql, na=False) + ] + if industry_filter != "All": + view = view[view["industry"] == industry_filter] + +st.divider() + +# ---- Table + actions ---- +left, right = st.columns([2.2, 1]) + +with left: + st.subheader("📌 Pending Positions") + if view.empty: + st.info("No pending positions 
match your filters.") + else: + show = view[[ + "coopPositionId","title","companyName","location","hourlyPay","deadline","startDate","endDate","industry" + ]].sort_values(["deadline","coopPositionId"], ascending=[True, False]) + st.dataframe(show, use_container_width=True, height=420) + +with right: + st.subheader("⚡ Quick Actions") + pos_id = st.number_input("Position ID", min_value=0, step=1, value=0) + a1, a2 = st.columns(2) + a3, a4 = st.columns(2) + + if a1.button("Approve", type="primary", use_container_width=True, disabled=pos_id<=0): + try: + put_json(f"{COOP_API}/{int(pos_id)}/approve") + st.success(f"Approved {int(pos_id)}"); st.rerun() + except Exception as e: + st.error(f"Approve failed: {e}") + + if a2.button("Delete", use_container_width=True, disabled=pos_id<=0): + try: + delete_json(f"{COOP_API}/{int(pos_id)}") + st.success(f"Deleted {int(pos_id)}"); st.rerun() + except Exception as e: + st.error(f"Delete failed: {e}") + + if a3.button("Flag", use_container_width=True, disabled=pos_id<=0): + try: + flag_json(int(pos_id), 1) + st.success(f"Flagged {int(pos_id)}"); st.rerun() + except Exception as e: + st.error(f"Flag failed: {e}") + + if a4.button("Unflag", use_container_width=True, disabled=pos_id<=0): + try: + unflag_json(int(pos_id)) + st.success(f"Unflagged {int(pos_id)}"); st.rerun() + except Exception as e: + st.error(f"Unflag failed: {e}") + +st.divider() + +# ---- Industry averages (optional context) ---- +st.subheader("💸 Industry Average Hourly Pay") +try: + if 'avg_pay' not in locals(): + avg_pay = pd.DataFrame(get_json(f"{COOP_API}/industryAveragePay")) + if avg_pay.empty: + st.caption("No data.") + else: + st.dataframe(avg_pay.rename(columns={"industry":"Industry","industryAvgHourlyPay":"Avg $/hr"}), use_container_width=True) +except Exception as e: + st.caption(f"Could not load averages: {e}") diff --git a/app/src/pages/33_Admin_DEI.py b/app/src/pages/33_Admin_DEI.py new file mode 100644 index 0000000000..02a087aa0d --- /dev/null +++ 
b/app/src/pages/33_Admin_DEI.py @@ -0,0 +1,66 @@ +# Admin_DEI.py +import os, requests, pandas as pd, streamlit as st +from modules.nav import SideBarLinks + +st.set_page_config(page_title="DEI Metrics • Coopalytics", layout="wide") +SideBarLinks() +st.title("DEI Metrics") + +BASE_API = os.getenv("BASE_API", "http://web-api:4000") + +def fetch_json(path): + r = requests.get(f"{BASE_API}{path}", timeout=10) + r.raise_for_status() + return r.json() + +# Try a single summary endpoint first; fall back to per-dimension endpoints if needed +data = {} +try: + # EXPECTED SHAPE: + # {"gender":[{"label":"Female","count":120},...], + # "race":[{"label":"Asian","count":90},...], + # "nationality":[...], + # "disability":[...]} + data = fetch_json("/api/dei/metrics") +except Exception: + # Fallback to separate endpoints if your API exposes them + for dim in ["gender", "race", "nationality", "disability"]: + try: + data[dim] = fetch_json(f"/api/dei/{dim}") + except Exception: + data[dim] = [] + +dims = [k for k,v in data.items() if isinstance(v, list) and len(v) > 0] +if not dims: + st.info("No DEI data available.") + st.stop() + +# Selector +colA, colB = st.columns([2,1]) +with colA: + dim = st.selectbox("Select metric", dims, index=0) +with colB: + show_table = st.toggle("Show table", value=False) + +# Prep dataframe +df = pd.DataFrame(data[dim]) +if "label" not in df.columns or "count" not in df.columns: + st.error(f"Endpoint for '{dim}' must return items with 'label' and 'count'.") + st.stop() + +df = df.groupby("label", as_index=False)["count"].sum().sort_values("count", ascending=False) +total = int(df["count"].sum()) + +# KPIs +k1,k2,k3 = st.columns(3) +k1.metric("Total records", f"{total}") +k2.metric("Distinct categories", f"{df.shape[0]}") +coverage = 100 if total > 0 else 0 +k3.metric("Coverage (%)", f"{coverage:.0f}%") + +# Chart + (optional) table +st.subheader(dim.capitalize()) +st.bar_chart(df.set_index("label")["count"]) + +if show_table: + st.dataframe(df, 
use_container_width=True) \ No newline at end of file diff --git a/app/src/pages/34_Admin_Analytics.py b/app/src/pages/34_Admin_Analytics.py new file mode 100644 index 0000000000..3d17949993 --- /dev/null +++ b/app/src/pages/34_Admin_Analytics.py @@ -0,0 +1,216 @@ +import streamlit as st +import requests +import pandas as pd +import plotly.express as px +import plotly.graph_objects as go +from datetime import datetime +import time + +import logging +logging.basicConfig(format='%(filename)s:%(lineno)s:%(levelname)s -- %(message)s', level=logging.INFO) +logger = logging.getLogger(__name__) + +from modules.nav import SideBarLinks + +st.set_page_config(layout='wide', page_title="Admin Analytics Dashboard") +SideBarLinks() + +# Custom CSS for professional styling +st.markdown(""" + +""", unsafe_allow_html=True) + +# Header Section +st.markdown(""" +
+

📊 Admin Analytics Dashboard

+

Comprehensive overview of system metrics and user analytics

+
+""", unsafe_allow_html=True) + +# Test the API endpoints +test_url = "http://web-api:4000" + +# User Metrics Section +st.markdown('

👥 User Analytics

', unsafe_allow_html=True) + +# Create three columns for metrics +col1, col2, col3 = st.columns(3) + +with col1: + # Fetch student count first + try: + response = requests.get(f"{test_url}/users/count/students", timeout=5) + if response.status_code == 200: + data = response.json() + student_count = data.get('student_count', 0) + else: + student_count = 0 + except Exception as e: + student_count = 0 + + st.markdown(f""" +
+
👨‍🎓 Students
+
{student_count}
+
Active in system
+
+ """, unsafe_allow_html=True) + +with col2: + # Fetch advisor count first + try: + response = requests.get(f"{test_url}/users/count/advisors", timeout=5) + if response.status_code == 200: + data = response.json() + advisor_count = data.get('advisor_count', 0) + else: + advisor_count = 0 + except Exception as e: + advisor_count = 0 + + st.markdown(f""" +
+
👨‍🏫 Advisors
+
{advisor_count}
+
Academic support
+
+ """, unsafe_allow_html=True) + +with col3: + # Fetch employer count first + try: + response = requests.get(f"{test_url}/users/count/employers", timeout=5) + if response.status_code == 200: + data = response.json() + employer_count = data.get('employer_count', 0) + else: + employer_count = 0 + except Exception as e: + employer_count = 0 + + st.markdown(f""" +
+
🏢 Employers
+
{employer_count}
+
Industry partners
+
+ """, unsafe_allow_html=True) + +# Summary Section +st.markdown("---") +st.markdown('

📈 System Summary

', unsafe_allow_html=True) + +# Calculate total users +try: + student_response = requests.get(f"{test_url}/users/count/students", timeout=5) + advisor_response = requests.get(f"{test_url}/users/count/advisors", timeout=5) + employer_response = requests.get(f"{test_url}/users/count/employers", timeout=5) + + if all(r.status_code == 200 for r in [student_response, advisor_response, employer_response]): + student_data = student_response.json() + advisor_data = advisor_response.json() + employer_data = employer_response.json() + + total_users = student_data.get('student_count', 0) + advisor_data.get('advisor_count', 0) + employer_data.get('employer_count', 0) + + col1, col2, col3, col4 = st.columns(4) + + with col1: + st.metric("Total Users", total_users, delta=None) + + with col2: + st.metric("System Status", "Online", delta="✓", delta_color="normal") + + with col3: + st.metric("API Response", "Healthy", delta="< 5s", delta_color="normal") + + with col4: + st.metric("Last Updated", datetime.now().strftime("%H:%M"), delta="Live", delta_color="normal") + + else: + st.warning("⚠️ Some metrics are unavailable. Please check system connectivity.") + +except Exception as e: + st.error(f"❌ Unable to fetch system summary: {str(e)}") + +# Footer +st.markdown("---") +st.markdown(""" +
+ Admin Analytics Dashboard • Real-time monitoring • Last updated: """ + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + """ +
+""", unsafe_allow_html=True) + + diff --git a/app/src/pages/30_About.py b/app/src/pages/90_About.py similarity index 100% rename from app/src/pages/30_About.py rename to app/src/pages/90_About.py diff --git a/database-files/00_northwind.sql b/database-files-demo/00_northwind.sql similarity index 100% rename from database-files/00_northwind.sql rename to database-files-demo/00_northwind.sql diff --git a/database-files/01_northwind-default-current-timestamp.sql b/database-files-demo/01_northwind-default-current-timestamp.sql similarity index 100% rename from database-files/01_northwind-default-current-timestamp.sql rename to database-files-demo/01_northwind-default-current-timestamp.sql diff --git a/database-files/02_northwind-data.sql b/database-files-demo/02_northwind-data.sql similarity index 100% rename from database-files/02_northwind-data.sql rename to database-files-demo/02_northwind-data.sql diff --git a/database-files/03_add_to_northwind.sql b/database-files-demo/03_add_to_northwind.sql similarity index 100% rename from database-files/03_add_to_northwind.sql rename to database-files-demo/03_add_to_northwind.sql diff --git a/database-files/README.md b/database-files-demo/README.md similarity index 100% rename from database-files/README.md rename to database-files-demo/README.md diff --git a/database-files/classicModels.sql b/database-files-demo/classicModels.sql similarity index 100% rename from database-files/classicModels.sql rename to database-files-demo/classicModels.sql diff --git a/database-files/ngo_db.sql b/database-files-demo/ngo_db.sql similarity index 100% rename from database-files/ngo_db.sql rename to database-files-demo/ngo_db.sql diff --git a/database-files/01-coopalytics.sql b/database-files/01-coopalytics.sql new file mode 100644 index 0000000000..c6c27a002a --- /dev/null +++ b/database-files/01-coopalytics.sql @@ -0,0 +1,146 @@ +DROP DATABASE IF EXISTS `coopalytics`; +CREATE DATABASE `coopalytics`; +USE `coopalytics`; + +CREATE TABLE skills 
( + skillId INT PRIMARY KEY, + name VARCHAR(20) NOT NULL, + category VARCHAR(20) NOT NULL +); + +CREATE TABLE companyProfiles ( + companyProfileId INT PRIMARY KEY, + name VARCHAR(50) NOT NULL, + bio LONGTEXT, + industry VARCHAR(30) NOT NULL, + websiteLink VARCHAR(100) +); + +CREATE TABLE users ( + userId INT PRIMARY KEY, + firstName VARCHAR(30) NOT NULL, + lastName VARCHAR(30) NOT NULL, + email VARCHAR(100) NOT NULL, + phone VARCHAR(20), + major VARCHAR(50), + minor VARCHAR(50), + college VARCHAR(100), + gradYear VARCHAR(10), + grade VARCHAR(20), + companyProfileId INT, + industry VARCHAR(30), + + FOREIGN KEY (companyProfileId) REFERENCES companyProfiles (companyProfileId) ON UPDATE CASCADE ON DELETE SET NULL +); + +CREATE TABLE demographics ( + demographicId INT PRIMARY KEY, + gender VARCHAR(20), + race VARCHAR(20), + nationality VARCHAR(20), + sexuality VARCHAR(20), + disability VARCHAR(20), + + FOREIGN KEY (demographicId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE coopPositions ( + coopPositionId INT PRIMARY KEY, + title VARCHAR(30) NOT NULL, + location VARCHAR(30) NOT NULL DEFAULT 'Not Specified', + description LONGTEXT NOT NULL, + hourlyPay FLOAT NOT NULL, + requiredSkillsId INT, + desiredSkillsId INT, + desiredGPA FLOAT, + deadline DATETIME, + startDate DATE NOT NULL, + endDate DATE NOT NULL, + flag BOOLEAN NOT NULL DEFAULT FALSE, + industry VARCHAR(30) NOT NULL DEFAULT 'Not Specified', + + FOREIGN KEY (requiredSkillsId) REFERENCES skills (skillId) ON UPDATE CASCADE ON DELETE SET NULL, + FOREIGN KEY (desiredSkillsId) REFERENCES skills (skillId) ON UPDATE CASCADE ON DELETE SET NULL +); + +CREATE TABLE skillDetails ( + skillId INT, + studentId INT, + proficiencyLevel INT NOT NULL, + + PRIMARY KEY (skillId, studentId), + FOREIGN KEY (skillId) REFERENCES skills (skillId) ON UPDATE CASCADE ON DELETE CASCADE, + FOREIGN KEY (studentId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE 
advisor_advisee ( + studentId INT, + advisorId INT, + flag BOOLEAN NOT NULL DEFAULT FALSE, + + PRIMARY KEY (studentId, advisorId), + FOREIGN KEY (studentId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE, + FOREIGN KEY (advisorId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE workedAtPos ( + studentId INT, + coopPositionId INT, + startDate DATE NOT NULL, + endDate DATE NOT NULL, + companyRating INT, + + PRIMARY KEY (studentId, coopPositionId), + FOREIGN KEY (studentId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE, + FOREIGN KEY (coopPositionId) REFERENCES coopPositions (coopPositionId) ON UPDATE CASCADE ON DELETE RESTRICT +); + +CREATE TABLE viewsPos ( + studentId INT, + coopPositionId INT, + preference BOOLEAN DEFAULT FALSE, + + PRIMARY KEY (studentId, coopPositionId), + FOREIGN KEY (studentId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE, + FOREIGN KEY (coopPositionId) REFERENCES coopPositions (coopPositionId) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE createsPos ( + employerId INT, + coopPositionId INT, + + PRIMARY KEY (employerId, coopPositionId), + FOREIGN KEY (employerId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE RESTRICT, + FOREIGN KEY (coopPositionId) REFERENCES coopPositions (coopPositionId) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE applications ( + applicationId INT PRIMARY KEY, + dateTimeApplied DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + status VARCHAR(15) NOT NULL DEFAULT 'Draft', + resume LONGTEXT, + gpa FLOAT, + coverLetter LONGTEXT, + coopPositionId INT NOT NULL, + + FOREIGN KEY (coopPositionId) REFERENCES coopPositions (coopPositionId) ON UPDATE CASCADE ON DELETE RESTRICT +); + +CREATE TABLE appliesToApp ( + applicationId INT, + studentId INT, + + PRIMARY KEY (applicationId, studentId), + FOREIGN KEY (applicationId) REFERENCES applications (applicationId) ON UPDATE CASCADE ON DELETE CASCADE, + FOREIGN KEY (studentId) 
REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE reviewsApp ( + applicationId INT, + employerId INT, + flag BOOLEAN NOT NULL DEFAULT FALSE, + + PRIMARY KEY (applicationId, employerId), + FOREIGN KEY (applicationId) REFERENCES applications (applicationId) ON UPDATE CASCADE ON DELETE CASCADE, + FOREIGN KEY (employerId) REFERENCES users (userId) ON UPDATE CASCADE ON DELETE CASCADE +); diff --git a/database-files/02-coopalytics-data.sql b/database-files/02-coopalytics-data.sql new file mode 100644 index 0000000000..ec806cf137 --- /dev/null +++ b/database-files/02-coopalytics-data.sql @@ -0,0 +1,545 @@ +USE coopalytics; + +-- 1. Skills table (40 rows - strong entity, no dependencies) +INSERT INTO skills (skillId, name, category) VALUES +(1, 'Python', 'Programming'), +(2, 'Java', 'Programming'), +(3, 'JavaScript', 'Programming'), +(4, 'React', 'Web Development'), +(5, 'Node.js', 'Web Development'), +(6, 'SQL', 'Database'), +(7, 'MongoDB', 'Database'), +(8, 'AWS', 'Cloud'), +(9, 'Docker', 'DevOps'), +(10, 'Git', 'Version Control'), +(11, 'Machine Learning', 'Data Science'), +(12, 'Data Analysis', 'Data Science'), +(13, 'Excel', 'Office'), +(14, 'PowerPoint', 'Office'), +(15, 'Project Management', 'Management'), +(16, 'Agile', 'Management'), +(17, 'Communication', 'Soft Skills'), +(18, 'Leadership', 'Soft Skills'), +(19, 'Problem Solving', 'Soft Skills'), +(20, 'Teamwork', 'Soft Skills'), +(21, 'C++', 'Programming'), +(22, 'C#', 'Programming'), +(23, 'PHP', 'Programming'), +(24, 'Ruby', 'Programming'), +(25, 'Swift', 'Programming'), +(26, 'Kotlin', 'Programming'), +(27, 'Angular', 'Web Development'), +(28, 'Vue.js', 'Web Development'), +(29, 'CSS', 'Web Development'), +(30, 'HTML', 'Web Development'), +(31, 'PostgreSQL', 'Database'), +(32, 'MySQL', 'Database'), +(33, 'Azure', 'Cloud'), +(34, 'GCP', 'Cloud'), +(35, 'Kubernetes', 'DevOps'), +(36, 'Jenkins', 'DevOps'), +(37, 'Tableau', 'Data Science'), +(38, 'R', 'Data Science'), +(39, 'Adobe 
Creative', 'Design'), +(40, 'UI/UX Design', 'Design'); + +-- 2. Company Profiles table (35 rows - strong entity, no dependencies) +INSERT INTO companyProfiles (companyProfileId, name, bio, industry, websiteLink) VALUES +(1, 'TechNova Inc', 'Leading software development company specializing in enterprise solutions and cloud infrastructure.', 'Technology', 'www.technova.com'), +(2, 'DataFlow Analytics', 'Data science consulting firm helping businesses make data-driven decisions through advanced analytics.', 'Technology', 'www.dataflow.com'), +(3, 'GreenTech Solutions', 'Environmental technology company focused on sustainable energy and green infrastructure solutions.', 'Environmental', 'www.greentech.com'), +(4, 'FinanceFirst Corp', 'Financial services company providing investment management and banking solutions to corporate clients.', 'Finance', 'www.financefirst.com'), +(5, 'HealthTech Innovations', 'Healthcare technology startup developing AI-powered diagnostic tools and patient management systems.', 'Healthcare', 'www.healthtech.com'), +(6, 'CyberShield Security', 'Cybersecurity firm specializing in threat detection, incident response, and security consulting services.', 'Technology', 'www.cybershield.com'), +(7, 'BioResearch Labs', 'Biotechnology research company focused on drug discovery and medical device development.', 'Healthcare', 'www.bioresearch.com'), +(8, 'CloudFirst Technologies', 'Cloud infrastructure provider offering scalable solutions for enterprise digital transformation.', 'Technology', 'www.cloudfirst.com'), +(9, 'MarketPulse Agency', 'Digital marketing agency specializing in social media strategy and content marketing campaigns.', 'Marketing', 'www.marketpulse.com'), +(10, 'AutoMech Industries', 'Manufacturing company specializing in automotive parts and industrial automation systems.', 'Manufacturing', 'www.automech.com'), +(11, 'EduTech Platform', 'Educational technology company developing online learning platforms and student management 
systems.', 'Education', 'www.edutech.com'), +(12, 'RetailMax Solutions', 'Retail technology provider offering point-of-sale systems and inventory management solutions.', 'Retail', 'www.retailmax.com'), +(13, 'EnergyFlow Corp', 'Renewable energy company focused on solar and wind power generation and distribution systems.', 'Energy', 'www.energyflow.com'), +(14, 'LogiTrans Systems', 'Logistics and transportation company providing supply chain management and delivery solutions.', 'Logistics', 'www.logitrans.com'), +(15, 'DesignStudio Pro', 'Creative design agency specializing in brand identity, web design, and user experience consulting.', 'Design', 'www.designstudio.com'), +(16, 'AgriTech Innovations', 'Agricultural technology company developing precision farming tools and crop management systems.', 'Agriculture', 'www.agritech.com'), +(17, 'SportsTech Analytics', 'Sports technology company providing performance analytics and fan engagement platforms.', 'Sports', 'www.sportstech.com'), +(18, 'MediaStream Corp', 'Media and entertainment company specializing in streaming platforms and content distribution.', 'Media', 'www.mediastream.com'), +(19, 'RealEstate Plus', 'Real estate technology company offering property management and virtual tour solutions.', 'Real Estate', 'www.realestate.com'), +(20, 'TravelTech Solutions', 'Travel technology provider developing booking platforms and travel management systems.', 'Travel', 'www.traveltech.com'), +(21, 'FoodTech Innovations', 'Food technology company focused on sustainable food production and delivery optimization.', 'Food', 'www.foodtech.com'), +(22, 'InsureTech Corp', 'Insurance technology company providing digital insurance platforms and risk assessment tools.', 'Insurance', 'www.insuretech.com'), +(23, 'GameDev Studios', 'Video game development company creating mobile and console games with immersive experiences.', 'Gaming', 'www.gamedev.com'), +(24, 'LegalTech Solutions', 'Legal technology provider offering case 
management systems and document automation tools.', 'Legal', 'www.legaltech.com'), +(25, 'ConstructTech Pro', 'Construction technology company providing project management and building information modeling.', 'Construction', 'www.constructtech.com'), +(26, 'PharmaResearch Inc', 'Pharmaceutical research company focused on drug development and clinical trial management.', 'Pharmaceutical', 'www.pharmaresearch.com'), +(27, 'AeroSpace Dynamics', 'Aerospace engineering company developing aircraft systems and space exploration technologies.', 'Aerospace', 'www.aerospace.com'), +(28, 'TextileTech Corp', 'Textile manufacturing company specializing in smart fabrics and sustainable clothing production.', 'Textile', 'www.textiletech.com'), +(29, 'MiningTech Solutions', 'Mining technology provider offering equipment automation and resource extraction optimization.', 'Mining', 'www.miningtech.com'), +(30, 'WaterTech Systems', 'Water technology company developing purification systems and water resource management solutions.', 'Water', 'www.watertech.com'), +(31, 'RoboTech Industries', 'Robotics company creating industrial automation solutions and service robots for various sectors.', 'Robotics', 'www.robotech.com'), +(32, 'ChemTech Labs', 'Chemical technology company specializing in materials science and chemical process optimization.', 'Chemical', 'www.chemtech.com'), +(33, 'TransportTech Corp', 'Transportation technology provider developing autonomous vehicle systems and traffic management.', 'Transportation', 'www.transporttech.com'), +(34, 'SecurityTech Pro', 'Physical security technology company offering surveillance systems and access control solutions.', 'Security', 'www.securitytech.com'), +(35, 'CleanTech Innovations', 'Clean technology company focused on waste management and environmental remediation solutions.', 'Environmental', 'www.cleantech.com'); + +-- 3. 
Users table (48 rows - references companyProfiles) +INSERT INTO users (userId, firstName, lastName, email, phone, major, minor, college, gradYear, grade, companyProfileId, industry) VALUES +-- Students (userId 1-30) +(1, 'Charlie', 'Stout', 'c.stout@student.edu', '555-0101', 'Computer Science', 'Mathematics', 'Khoury College of Computer Sciences', '2026', 'Junior', NULL, NULL), +(2, 'Liam', 'Williams', 'l.williams@student.edu', '555-0102', 'Business', 'Economics', 'D\'Amore-McKim School of Business', '2025', 'Senior', NULL, NULL), +(3, 'Sophia', 'Brown', 's.brown@student.edu', '555-0103', 'Mechanical Engineering', 'Physics', 'College of Engineering', '2027', 'Sophomore', NULL, NULL), +(4, 'Noah', 'Davis', 'n.davis@student.edu', '555-0104', 'Data Science', NULL, 'Khoury College of Computer Sciences', '2026', 'Junior', NULL, NULL), +(5, 'Olivia', 'Miller', 'o.miller@student.edu', '555-0105', 'Marketing', 'Psychology', 'D\'Amore-McKim School of Business', '2025', 'Senior', NULL, NULL), +(6, 'Mason', 'Wilson', 'm.wilson@student.edu', '555-0106', 'Cybersecurity', NULL, 'Khoury College of Computer Sciences', '2026', 'Junior', NULL, NULL), +(7, 'Ava', 'Moore', 'a.moore@student.edu', '555-0107', 'Biomedical Engineering', 'Chemistry', 'College of Engineering', '2027', 'Sophomore', NULL, NULL), +(8, 'Ethan', 'Taylor', 'e.taylor@student.edu', '555-0108', 'Finance', NULL, 'D\'Amore-McKim School of Business', '2025', 'Senior', NULL, NULL), +(9, 'Isabella', 'Anderson', 'i.anderson@student.edu', '555-0109', 'Psychology', 'Sociology', 'College of Social Sciences and Humanities', '2026', 'Junior', NULL, NULL), +(10, 'James', 'Thomas', 'j.thomas@student.edu', '555-0110', 'Mechanical Engineering', NULL, 'College of Engineering', '2027', 'Sophomore', NULL, NULL), +(11, 'Mia', 'Jackson', 'm.jackson@student.edu', '555-0111', 'Computer Science', NULL, 'Khoury College of Computer Sciences', '2025', 'Senior', NULL, NULL), +(12, 'Lucas', 'White', 'l.white@student.edu', '555-0112', 
'Business', 'Data Science', 'D\'Amore-McKim School of Business', '2026', 'Junior', NULL, NULL), +(13, 'Charlotte', 'Harris', 'c.harris@student.edu', '555-0113', 'Environmental Engineering', 'Biology', 'College of Engineering', '2027', 'Sophomore', NULL, NULL), +(14, 'Benjamin', 'Martin', 'b.martin@student.edu', '555-0114', 'Information Systems', NULL, 'Khoury College of Computer Sciences', '2025', 'Senior', NULL, NULL), +(15, 'Amelia', 'Garcia', 'a.garcia@student.edu', '555-0115', 'Physics', 'Mathematics', 'College of Science', '2026', 'Junior', NULL, NULL), +(16, 'Henry', 'Rodriguez', 'h.rodriguez@student.edu', '555-0116', 'Computer Science', 'Mathematics', 'Khoury College of Computer Sciences', '2027', 'Sophomore', NULL, NULL), +(17, 'Harper', 'Lewis', 'h.lewis@student.edu', '555-0117', 'Design', 'Art', 'College of Arts, Media and Design', '2025', 'Senior', NULL, NULL), +(18, 'Alexander', 'Lee', 'a.lee@student.edu', '555-0118', 'Electrical Engineering', NULL, 'College of Engineering', '2026', 'Junior', NULL, NULL), +(19, 'Evelyn', 'Walker', 'e.walker@student.edu', '555-0119', 'International Business', 'Spanish', 'D\'Amore-McKim School of Business', '2027', 'Sophomore', NULL, NULL), +(20, 'Sebastian', 'Hall', 's.hall@student.edu', '555-0120', 'Data Science', NULL, 'Khoury College of Computer Sciences', '2025', 'Senior', NULL, NULL), +(21, 'Aria', 'Allen', 'a.allen@student.edu', '555-0121', 'Marketing', NULL, 'D\'Amore-McKim School of Business', '2026', 'Junior', NULL, NULL), +(22, 'Owen', 'Young', 'o.young@student.edu', '555-0122', 'Computer Science', NULL, 'Khoury College of Computer Sciences', '2027', 'Sophomore', NULL, NULL), +(23, 'Luna', 'King', 'l.king@student.edu', '555-0123', 'Business', 'Finance', 'D\'Amore-McKim School of Business', '2025', 'Senior', NULL, NULL), +(24, 'Grayson', 'Wright', 'g.wright@student.edu', '555-0124', 'Cybersecurity', NULL, 'Khoury College of Computer Sciences', '2026', 'Junior', NULL, NULL), +(25, 'Chloe', 'Lopez', 
'c.lopez@student.edu', '555-0125', 'Biology', 'Chemistry', 'College of Science', '2027', 'Sophomore', NULL, NULL), +(26, 'Carter', 'Hill', 'c.hill@student.edu', '555-0126', 'Information Systems', 'Business', 'Khoury College of Computer Sciences', '2025', 'Senior', NULL, NULL), +(27, 'Zoey', 'Scott', 'z.scott@student.edu', '555-0127', 'Environmental Engineering', NULL, 'College of Engineering', '2026', 'Junior', NULL, NULL), +(28, 'Luke', 'Green', 'l.green@student.edu', '555-0128', 'Chemistry', 'Mathematics', 'College of Science', '2027', 'Sophomore', NULL, NULL), +(29, 'Lily', 'Adams', 'l.adams@student.edu', '555-0129', 'Design', NULL, 'College of Arts, Media and Design', '2025', 'Senior', NULL, NULL), +(30, 'Jack', 'Baker', 'j.baker@student.edu', '555-0130', 'Computer Science', NULL, 'Khoury College of Computer Sciences', '2026', 'Junior', NULL, NULL), +-- Advisors (userId 31-36) +(31, 'Sarah', 'Martinez', 's.martinez@neu.edu', '555-0301', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Academic'), +(32, 'Michael', 'Chen', 'm.chen@neu.edu', '555-0302', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Academic'), +(33, 'Jennifer', 'Kim', 'j.kim@neu.edu', '555-0303', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Academic'), +(34, 'David', 'Johnson', 'd.johnson@neu.edu', '555-0304', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Academic'), +(35, 'Lisa', 'Thompson', 'l.thompson@neu.edu', '555-0305', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Academic'), +(36, 'Robert', 'Wilson', 'r.wilson@neu.edu', '555-0306', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Academic'), +-- Employers (userId 37-44) +(37, 'Phoebe', 'Hwang', 'p.hwang@technova.com', '555-0401', NULL, NULL, NULL, NULL, NULL, 1, 'Technology'), +(38, 'Marcus', 'Roberts', 'm.roberts@dataflow.com', '555-0402', NULL, NULL, NULL, NULL, NULL, 2, 'Technology'), +(39, 'Elena', 'Thompson', 'e.thompson@greenenergy.com', '555-0403', NULL, NULL, NULL, NULL, NULL, 3, 'Energy'), +(40, 'James', 'Martinez', 'j.martinez@healthtech.com', '555-0404', NULL, NULL, NULL, NULL, 
NULL, 4, 'Healthcare'), +(41, 'Rachel', 'Anderson', 'r.anderson@financefirst.com', '555-0405', NULL, NULL, NULL, NULL, NULL, 5, 'Finance'), +(42, 'Daniel', 'Clark', 'd.clark@autoinnovate.com', '555-0406', NULL, NULL, NULL, NULL, NULL, 6, 'Automotive'), +(43, 'Amanda', 'Lewis', 'a.lewis@cloudsecure.com', '555-0407', NULL, NULL, NULL, NULL, NULL, 7, 'Technology'), +(44, 'Christopher', 'Walker', 'c.walker@bioresearch.com', '555-0408', NULL, NULL, NULL, NULL, NULL, 8, 'Healthcare'), +-- Admins (userId 45-48) +(45, 'Kaelyn', 'Dunn', 'k.dunn@neu.edu', '555-0501', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Administration'), +(46, 'Tyler', 'Rodriguez', 't.rodriguez@neu.edu', '555-0502', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Administration'), +(47, 'Madison', 'Foster', 'm.foster@neu.edu', '555-0503', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Administration'), +(48, 'Jordan', 'Bell', 'j.bell@neu.edu', '555-0504', NULL, NULL, 'NEU', NULL, NULL, NULL, 'Administration'); + +-- 4. Demographics table (48 rows - references users) +INSERT INTO demographics (demographicId, gender, race, nationality, sexuality, disability) VALUES +-- Students (1-30) +(1, 'Male', 'White', 'American', 'Heterosexual', NULL), +(2, 'Male', 'Hispanic', 'American', 'Heterosexual', NULL), +(3, 'Female', 'Asian', 'American', 'Heterosexual', NULL), +(4, 'Male', 'Black', 'American', 'Heterosexual', NULL), +(5, 'Female', 'White', 'American', 'Bisexual', NULL), +(6, 'Male', 'White', 'American', 'Heterosexual', 'ADHD'), +(7, 'Female', 'Mixed Race', 'American', 'Heterosexual', NULL), +(8, 'Male', 'Asian', 'International', 'Heterosexual', NULL), +(9, 'Female', 'Hispanic', 'American', 'Heterosexual', 'Anxiety'), +(10, 'Male', 'White', 'American', 'Gay', NULL), +(11, 'Female', 'Black', 'American', 'Lesbian', NULL), +(12, 'Male', 'White', 'American', 'Heterosexual', NULL), +(13, 'Female', 'Native American', 'American', 'Heterosexual', NULL), +(14, 'Male', 'Asian', 'American', 'Heterosexual', 'Dyslexia'), +(15, 'Female', 
'White', 'International', 'Heterosexual', NULL), +(16, 'Male', 'Hispanic', 'American', 'Bisexual', NULL), +(17, 'Female', 'Asian', 'American', 'Heterosexual', NULL), +(18, 'Male', 'Black', 'American', 'Heterosexual', NULL), +(19, 'Female', 'White', 'International', 'Heterosexual', NULL), +(20, 'Male', 'Mixed Race', 'American', 'Heterosexual', 'Depression'), +(21, 'Female', 'Hispanic', 'American', 'Heterosexual', NULL), +(22, 'Male', 'White', 'American', 'Heterosexual', NULL), +(23, 'Female', 'Asian', 'International', 'Heterosexual', NULL), +(24, 'Male', 'White', 'American', 'Gay', NULL), +(25, 'Female', 'Black', 'American', 'Heterosexual', NULL), +(26, 'Male', 'Hispanic', 'American', 'Heterosexual', 'Autism'), +(27, 'Female', 'White', 'American', 'Bisexual', NULL), +(28, 'Male', 'Asian', 'American', 'Heterosexual', NULL), +(29, 'Female', 'Mixed Race', 'American', 'Heterosexual', NULL), +(30, 'Male', 'White', 'American', 'Heterosexual', NULL), +-- Advisors (31-36) +(31, 'Female', 'Hispanic', 'American', 'Heterosexual', NULL), +(32, 'Male', 'Asian', 'American', 'Heterosexual', NULL), +(33, 'Female', 'Korean', 'American', 'Heterosexual', NULL), +(34, 'Male', 'White', 'American', 'Heterosexual', NULL), +(35, 'Female', 'Black', 'American', 'Heterosexual', NULL), +(36, 'Male', 'White', 'American', 'Gay', NULL), +-- Employers (37-44) +(37, 'Female', 'Asian', 'American', 'Heterosexual', NULL), +(38, 'Male', 'White', 'American', 'Heterosexual', NULL), +(39, 'Female', 'Hispanic', 'American', 'Bisexual', NULL), +(40, 'Male', 'Hispanic', 'American', 'Heterosexual', NULL), +(41, 'Female', 'Black', 'American', 'Heterosexual', NULL), +(42, 'Male', 'White', 'American', 'Heterosexual', NULL), +(43, 'Female', 'White', 'American', 'Lesbian', NULL), +(44, 'Male', 'Mixed Race', 'American', 'Heterosexual', NULL), +-- Admins (45-48) +(45, 'Female', 'White', 'American', 'Heterosexual', NULL), +(46, 'Male', 'Hispanic', 'American', 'Heterosexual', NULL), +(47, 'Female', 'Asian', 'American', 
'Bisexual', NULL), +(48, 'Non-binary', 'Black', 'American', 'Pansexual', NULL); + +-- 5. Coop Positions table (50 rows - references skills) +INSERT INTO coopPositions (coopPositionId, title, location, description, hourlyPay, requiredSkillsId, desiredSkillsId, desiredGPA, deadline, startDate, endDate, flag, industry) VALUES +(1, 'Software Developer Intern', 'Boston, MA', 'Develop web applications using modern frameworks and participate in agile development processes.', 22.50, 1, 4, 3.0, '2025-02-15 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(2, 'Data Analyst Co-op', 'Cambridge, MA', 'Analyze business data and create reports using SQL and Python for data-driven insights.', 20.00, 12, 11, 3.2, '2025-02-20 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(3, 'Marketing Assistant', 'New York, NY', 'Support digital marketing campaigns and social media strategy development.', 18.50, 17, 14, 2.8, '2025-03-01 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Marketing'), +(4, 'Cybersecurity Intern', 'Burlington, MA', 'Assist with security assessments and vulnerability testing in cloud environments.', 25.00, 6, 8, 3.3, '2025-02-10 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(5, 'Financial Analyst Co-op', 'Boston, MA', 'Support financial modeling and investment analysis for banking operations.', 21.00, 13, 12, 3.4, '2025-02-25 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Finance'), +(6, 'UX Design Intern', 'San Francisco, CA', 'Create user interface designs and conduct user research for mobile applications.', 24.00, 40, 39, 3.0, '2025-03-05 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(7, 'Biotech Research Co-op', 'Cambridge, MA', 'Conduct laboratory research and assist with clinical trial data analysis.', 19.50, 12, 11, 3.5, '2025-02-28 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Healthcare'), +(8, 'Environmental Engineer', 'Portland, OR', 'Work on renewable energy projects and sustainability assessments.', 
23.00, 15, 12, 3.1, '2025-03-10 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Energy'), +(9, 'DevOps Intern', 'Seattle, WA', 'Manage CI/CD pipelines and cloud infrastructure using AWS and Docker.', 26.00, 9, 35, 3.2, '2025-02-12 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(10, 'Business Analyst Co-op', 'Chicago, IL', 'Analyze business processes and requirements for software implementation.', 20.50, 15, 13, 3.0, '2025-03-15 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Consulting'), +(11, 'Machine Learning Intern', 'Austin, TX', 'Develop ML models for predictive analytics and data processing pipelines.', 28.00, 11, 1, 3.6, '2025-02-18 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(12, 'Mobile App Developer', 'Los Angeles, CA', 'Build iOS and Android applications using native and cross-platform technologies.', 24.50, 25, 26, 3.1, '2025-03-20 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(13, 'Supply Chain Analyst', 'Atlanta, GA', 'Optimize logistics operations and analyze supply chain performance metrics.', 19.00, 12, 15, 2.9, '2025-02-22 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Logistics'), +(14, 'Game Developer Intern', 'San Diego, CA', 'Create game mechanics and features using Unity and C# programming.', 22.00, 22, 21, 3.0, '2025-03-25 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Gaming'), +(15, 'Healthcare Data Analyst', 'Philadelphia, PA', 'Analyze patient data and healthcare outcomes for medical research.', 21.50, 12, 37, 3.3, '2025-02-14 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Healthcare'), +(16, 'Full Stack Developer', 'Denver, CO', 'Build end-to-end web applications using React, Node.js, and databases.', 25.50, 3, 5, 3.2, '2025-03-08 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(17, 'Quality Assurance Co-op', 'Miami, FL', 'Test software applications and develop automated testing frameworks.', 18.00, 3, 10, 2.8, '2025-02-26 23:59:59', '2025-06-01', '2025-12-01', FALSE, 
'Technology'), +(18, 'Robotics Engineer Intern', 'Detroit, MI', 'Design and program robotic systems for manufacturing automation.', 24.00, 21, 11, 3.4, '2025-03-12 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Manufacturing'), +(19, 'Digital Marketing Co-op', 'Nashville, TN', 'Manage social media campaigns and analyze digital marketing performance.', 17.50, 17, 12, 2.7, '2025-02-16 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Marketing'), +(20, 'Cloud Engineer Intern', 'Phoenix, AZ', 'Deploy and manage cloud infrastructure using AWS and Azure platforms.', 27.00, 8, 33, 3.5, '2025-03-18 23:59:59', '2025-06-01', '2025-12-01', FALSE, 'Technology'), +(21, 'Product Manager Co-op', 'San Jose, CA', 'Assist with product roadmap planning and coordinate development teams.', 23.50, 15, 17, 3.1, '2025-02-08 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Technology'), +(22, 'Backend Developer Intern', 'Portland, OR', 'Build server-side applications and APIs using Java and microservices.', 24.00, 2, 6, 3.0, '2025-02-24 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Technology'), +(23, 'Business Intelligence', 'Dallas, TX', 'Create dashboards and reports for executive decision making.', 20.50, 37, 13, 3.2, '2025-03-22 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Finance'), +(24, 'Frontend Developer Co-op', 'Tampa, FL', 'Develop user interfaces using React and modern CSS frameworks.', 22.00, 4, 29, 2.9, '2025-02-11 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Technology'), +(25, 'Research Assistant', 'Baltimore, MD', 'Support biomedical research projects and data collection efforts.', 16.50, 12, 38, 3.4, '2025-03-14 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Healthcare'), +(26, 'Systems Administrator', 'Salt Lake City, UT', 'Maintain IT infrastructure and provide technical support services.', 21.00, 8, 9, 3.0, '2025-02-19 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Technology'), +(27, 'Finance Intern', 'Minneapolis, MN', 'Assist with financial planning and budget 
analysis for corporate clients.', 19.00, 13, 12, 3.3, '2025-03-28 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Finance'), +(28, 'Software QA Engineer', 'Orlando, FL', 'Design test cases and automate testing procedures for software releases.', 20.00, 10, 3, 3.1, '2025-02-13 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Technology'), +(29, 'Data Engineer Co-op', 'Charlotte, NC', 'Build data pipelines and manage ETL processes for analytics platforms.', 25.00, 1, 7, 3.4, '2025-03-30 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Technology'), +(30, 'Project Coordinator', 'Kansas City, MO', 'Coordinate cross-functional teams and manage project timelines.', 18.00, 15, 16, 2.8, '2025-02-21 23:59:59', '2025-09-01', '2026-03-01', FALSE, 'Consulting'), +(31, 'Web Developer Intern', 'Las Vegas, NV', 'Create responsive websites and web applications for client projects.', 21.50, 30, 4, 3.0, '2025-03-06 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(32, 'Security Analyst Co-op', 'Raleigh, NC', 'Monitor security systems and investigate potential cyber threats.', 23.00, 6, 8, 3.2, '2025-02-17 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(33, 'Operations Analyst', 'Columbus, OH', 'Improve operational efficiency and analyze business performance metrics.', 19.50, 13, 15, 3.0, '2025-03-24 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Operations'), +(34, 'Android Developer', 'Indianapolis, IN', 'Develop native Android applications using Kotlin and Java.', 23.50, 26, 2, 3.1, '2025-02-09 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(35, 'Database Administrator', 'Memphis, TN', 'Manage database systems and optimize query performance.', 22.00, 6, 31, 3.3, '2025-03-16 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(36, 'Sales Analytics Intern', 'Louisville, KY', 'Analyze sales data and create performance reports for management.', 17.00, 12, 14, 2.9, '2025-02-23 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Sales'), +(37, 
'IoT Developer Co-op', 'Oklahoma City, OK', 'Develop Internet of Things applications and sensor integration systems.', 24.50, 3, 1, 3.2, '2025-03-26 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(38, 'Technical Writer', 'Richmond, VA', 'Create technical documentation and user manuals for software products.', 18.50, 17, 14, 2.8, '2025-02-15 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(39, 'Blockchain Developer', 'Providence, RI', 'Build decentralized applications and smart contracts using blockchain technology.', 29.00, 3, 22, 3.5, '2025-03-11 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Finance'), +(40, 'AI Research Intern', 'Hartford, CT', 'Conduct artificial intelligence research and develop machine learning algorithms.', 26.50, 11, 1, 3.6, '2025-02-27 23:59:59', '2026-01-01', '2026-07-01', FALSE, 'Technology'), +(41, 'Network Engineer Co-op', 'Bridgeport, CT', 'Design and maintain network infrastructure and troubleshoot connectivity issues.', 21.50, 8, 32, 3.1, '2025-03-19 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Technology'), +(42, 'HR Analytics Intern', 'Newark, NJ', 'Analyze employee data and support human resources decision making.', 17.50, 12, 13, 2.9, '2025-02-12 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Human Resources'), +(43, 'Manufacturing Engineer', 'Buffalo, NY', 'Optimize production processes and implement lean manufacturing principles.', 22.50, 15, 12, 3.3, '2025-03-21 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Manufacturing'), +(44, 'Content Creator Co-op', 'Syracuse, NY', 'Develop multimedia content and manage brand social media presence.', 16.00, 39, 17, 2.7, '2025-02-20 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Marketing'), +(45, 'Automation Engineer', 'Rochester, NY', 'Design automated systems and implement robotic process automation.', 25.50, 1, 21, 3.4, '2025-03-13 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Manufacturing'), +(46, 'Customer Success Intern', 'Albany, NY', 'Support 
customer onboarding and analyze customer satisfaction metrics.', 17.00, 17, 12, 2.8, '2025-02-25 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Customer Service'), +(47, 'Computer Vision Co-op', 'Burlington, VT', 'Develop image processing algorithms and computer vision applications.', 27.50, 11, 1, 3.5, '2025-03-17 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Technology'), +(48, 'Product Design Intern', 'Manchester, NH', 'Create product prototypes and conduct user experience research.', 20.00, 40, 39, 3.0, '2025-02-14 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Design'), +(49, 'Infrastructure Engineer', 'Portland, ME', 'Manage cloud infrastructure and implement DevOps best practices.', 24.00, 8, 35, 3.2, '2025-03-23 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Technology'), +(50, 'Business Development', 'Concord, NH', 'Identify new business opportunities and support partnership development.', 18.50, 17, 15, 2.9, '2025-02-18 23:59:59', '2026-06-01', '2026-12-01', FALSE, 'Business'); + +-- 6. Applications table (60 rows - references coopPositions) +INSERT INTO applications (applicationId, dateTimeApplied, status, resume, gpa, coverLetter, coopPositionId) VALUES +-- Charlie Stout (CS major, GPA 3.7, has Python, Java, JavaScript, React, etc.) +(1, '2025-01-15 10:30:00', 'Submitted', 'Resume content for Charlie Stout...', 3.7, 'Cover letter expressing interest in software development...', 1), +(2, '2025-01-16 14:20:00', 'Under Review', 'Resume content for Charlie Stout...', 3.7, 'Cover letter for full stack developer...', 16), +-- Liam Williams (Business major, GPA 3.5, has Excel, PowerPoint, Project Management, etc.) +(3, '2025-01-18 09:45:00', 'Submitted', 'Resume content for Liam Williams...', 3.5, 'Cover letter highlighting business experience...', 5), +(4, '2025-01-20 16:15:00', 'Draft', 'Resume content for Liam Williams...', 3.5, NULL, 10), +-- Sophia Brown (Engineering major, GPA 3.8, has C++, Excel, Project Management, etc.) 
+(5, '2025-01-22 11:30:00', 'Submitted', 'Resume content for Sophia Brown...', 3.8, 'Cover letter for engineering role...', 18), +(6, '2025-01-25 08:45:00', 'Under Review', 'Resume content for Sophia Brown...', 3.8, 'Manufacturing engineering application...', 43), +-- Noah Davis (Data Science major, GPA 3.9, has Python, ML, Data Analysis, etc.) +(7, '2025-01-28 13:20:00', 'Submitted', 'Resume content for Noah Davis...', 3.9, 'Machine learning interest cover letter...', 11), +(8, '2025-01-30 10:00:00', 'Rejected', 'Resume content for Noah Davis...', 3.9, 'Data engineering application...', 29), +-- Additional placement data for analytics +(61, '2025-02-01 09:00:00', 'Accepted', 'Resume content for Charlie Stout...', 3.7, 'Software engineer acceptance...', 1), +(62, '2025-02-02 10:00:00', 'Rejected', 'Resume content for Liam Williams...', 3.5, 'Business analyst rejection...', 5), +(63, '2025-02-03 11:00:00', 'Accepted', 'Resume content for Sophia Brown...', 3.8, 'Engineering acceptance...', 18), +(64, '2025-02-04 12:00:00', 'Accepted', 'Resume content for Noah Davis...', 3.9, 'Data scientist acceptance...', 11), +(65, '2025-02-05 13:00:00', 'Rejected', 'Resume content for Olivia Miller...', 3.4, 'Marketing rejection...', 6), +(66, '2025-02-06 14:00:00', 'Accepted', 'Resume content for Emma Davis...', 3.6, 'Finance acceptance...', 10), + +-- Additional comprehensive application data for expanded analytics +-- Mason Wilson (userId=6) - Cybersecurity applications +(67, '2025-02-07 09:00:00', 'Accepted', 'Resume content for Mason Wilson...', 3.6, 'Cybersecurity acceptance...', 32), +(68, '2025-02-08 10:00:00', 'Rejected', 'Resume content for Mason Wilson...', 3.6, 'Security analyst rejection...', 41), + +-- Ava Moore (userId=7) - Biomedical Engineering applications +(69, '2025-02-09 11:00:00', 'Accepted', 'Resume content for Ava Moore...', 3.8, 'Research acceptance...', 25), +(70, '2025-02-10 12:00:00', 'Rejected', 'Resume content for Ava Moore...', 3.8, 'Healthcare 
rejection...', 15), + +-- Ethan Taylor (userId=8) - Finance applications +(71, '2025-02-11 13:00:00', 'Accepted', 'Resume content for Ethan Taylor...', 3.5, 'Finance acceptance...', 27), +(72, '2025-02-12 14:00:00', 'Rejected', 'Resume content for Ethan Taylor...', 3.5, 'Investment rejection...', 23), + +-- Isabella Anderson (userId=9) - Psychology/HR applications +(73, '2025-02-13 15:00:00', 'Accepted', 'Resume content for Isabella Anderson...', 3.4, 'HR analytics acceptance...', 42), +(74, '2025-02-14 16:00:00', 'Rejected', 'Resume content for Isabella Anderson...', 3.4, 'People analytics rejection...', 33), + +-- James Thomas (userId=10) - Mechanical Engineering applications +(75, '2025-02-15 17:00:00', 'Accepted', 'Resume content for James Thomas...', 3.9, 'Manufacturing acceptance...', 43), +(76, '2025-02-16 18:00:00', 'Rejected', 'Resume content for James Thomas...', 3.9, 'Robotics rejection...', 18), + +-- Mia Jackson (userId=11) - CS Senior applications +(77, '2025-02-17 09:00:00', 'Accepted', 'Resume content for Mia Jackson...', 3.8, 'Software developer acceptance...', 1), +(78, '2025-02-18 10:00:00', 'Rejected', 'Resume content for Mia Jackson...', 3.8, 'Full stack rejection...', 16), +(79, '2025-02-19 11:00:00', 'Accepted', 'Resume content for Mia Jackson...', 3.8, 'Blockchain acceptance...', 39), + +-- Lucas White (userId=12) - Business with Data Science minor +(80, '2025-02-20 12:00:00', 'Accepted', 'Resume content for Lucas White...', 3.6, 'Business analyst acceptance...', 10), +(81, '2025-02-21 13:00:00', 'Rejected', 'Resume content for Lucas White...', 3.6, 'Data analyst rejection...', 2), + +-- Charlotte Harris (userId=13) - Environmental Engineering +(82, '2025-02-22 14:00:00', 'Accepted', 'Resume content for Charlotte Harris...', 3.7, 'Environmental acceptance...', 8), +(83, '2025-02-23 15:00:00', 'Rejected', 'Resume content for Charlotte Harris...', 3.7, 'Sustainability rejection...', 33), + +-- Benjamin Martin (userId=14) - Information Systems 
Senior +(84, '2025-02-24 16:00:00', 'Accepted', 'Resume content for Benjamin Martin...', 3.5, 'Systems admin acceptance...', 26), +(85, '2025-02-25 17:00:00', 'Rejected', 'Resume content for Benjamin Martin...', 3.5, 'Database admin rejection...', 35), + +-- Amelia Garcia (userId=15) - Physics major +(86, '2025-02-26 18:00:00', 'Accepted', 'Resume content for Amelia Garcia...', 3.9, 'Research assistant acceptance...', 25), +(87, '2025-02-27 09:00:00', 'Rejected', 'Resume content for Amelia Garcia...', 3.9, 'Data science rejection...', 11), + +-- Henry Rodriguez (userId=16) - CS Sophomore +(88, '2025-02-28 10:00:00', 'Accepted', 'Resume content for Henry Rodriguez...', 3.3, 'QA engineer acceptance...', 17), +(89, '2025-03-01 11:00:00', 'Rejected', 'Resume content for Henry Rodriguez...', 3.3, 'Software dev rejection...', 1), + +-- Harper Lewis (userId=17) - Design Senior +(90, '2025-03-02 12:00:00', 'Accepted', 'Resume content for Harper Lewis...', 3.8, 'UX design acceptance...', 6), +(91, '2025-03-03 13:00:00', 'Rejected', 'Resume content for Harper Lewis...', 3.8, 'Product design rejection...', 48), + +-- Alexander Lee (userId=18) - Electrical Engineering +(92, '2025-03-04 14:00:00', 'Accepted', 'Resume content for Alexander Lee...', 3.7, 'Robotics acceptance...', 18), +(93, '2025-03-05 15:00:00', 'Rejected', 'Resume content for Alexander Lee...', 3.7, 'Hardware rejection...', 45), + +-- Evelyn Walker (userId=19) - International Business +(94, '2025-03-06 16:00:00', 'Accepted', 'Resume content for Evelyn Walker...', 3.6, 'Business dev acceptance...', 50), +(95, '2025-03-07 17:00:00', 'Rejected', 'Resume content for Evelyn Walker...', 3.6, 'Marketing rejection...', 3), + +-- Sebastian Hall (userId=20) - Data Science Senior +(96, '2025-03-08 18:00:00', 'Accepted', 'Resume content for Sebastian Hall...', 3.9, 'Data analyst acceptance...', 2), +(97, '2025-03-09 09:00:00', 'Rejected', 'Resume content for Sebastian Hall...', 3.9, 'ML engineer rejection...', 11), +(98, 
'2025-03-10 10:00:00', 'Accepted', 'Resume content for Sebastian Hall...', 3.9, 'Computer vision acceptance...', 47), + +-- Aria Allen (userId=21) - Marketing major +(99, '2025-03-11 11:00:00', 'Accepted', 'Resume content for Aria Allen...', 3.4, 'Marketing assistant acceptance...', 3), +(100, '2025-03-12 12:00:00', 'Rejected', 'Resume content for Aria Allen...', 3.4, 'Digital marketing rejection...', 19), + +-- Owen Young (userId=22) - CS Sophomore +(101, '2025-03-13 13:00:00', 'Accepted', 'Resume content for Owen Young...', 3.5, 'QA engineer acceptance...', 28), +(102, '2025-03-14 14:00:00', 'Rejected', 'Resume content for Owen Young...', 3.5, 'Frontend dev rejection...', 24), + +-- Luna King (userId=23) - Business Senior with Finance focus +(103, '2025-03-15 15:00:00', 'Accepted', 'Resume content for Luna King...', 3.7, 'Finance intern acceptance...', 27), +(104, '2025-03-16 16:00:00', 'Rejected', 'Resume content for Luna King...', 3.7, 'Business intelligence rejection...', 23), +(105, '2025-03-17 17:00:00', 'Accepted', 'Resume content for Luna King...', 3.7, 'Blockchain dev acceptance...', 39), + +-- Grayson Wright (userId=24) - Cybersecurity major +(106, '2025-03-18 18:00:00', 'Accepted', 'Resume content for Grayson Wright...', 3.6, 'Security analyst acceptance...', 32), +(107, '2025-03-19 09:00:00', 'Rejected', 'Resume content for Grayson Wright...', 3.6, 'Penetration testing rejection...', 41), + +-- Chloe Lopez (userId=25) - Biology major +(108, '2025-03-20 10:00:00', 'Accepted', 'Resume content for Chloe Lopez...', 3.8, 'Healthcare data acceptance...', 15), +(109, '2025-03-21 11:00:00', 'Rejected', 'Resume content for Chloe Lopez...', 3.8, 'Biotech rejection...', 7), +-- Olivia Miller (Marketing major, GPA 3.4, has Excel, PowerPoint, Communication, etc.) 
+(9, '2025-02-01 15:30:00', 'Draft', 'Resume content for Olivia Miller...', 3.4, NULL, 3), +(10, '2025-02-03 12:45:00', 'Submitted', 'Resume content for Olivia Miller...', 3.4, 'Digital marketing interest...', 19); + +-- 7. Skill Details table (sample rows for testing - references skills and users) +INSERT INTO skillDetails (skillId, studentId, proficiencyLevel) VALUES +-- Student 1 (Charlie Stout) - Computer Science major +(1, 1, 4), (2, 1, 3), (3, 1, 5), (4, 1, 4), (5, 1, 3), (6, 1, 4), (10, 1, 5), (17, 1, 4), (19, 1, 4), (20, 1, 5), +-- Student 2 (Liam Williams) - Business major +(13, 2, 5), (14, 2, 4), (15, 2, 4), (16, 2, 3), (17, 2, 5), (18, 2, 4), (19, 2, 4), (20, 2, 5), +-- Student 3 (Sophia Brown) - Engineering major +(21, 3, 4), (13, 3, 4), (15, 3, 5), (19, 3, 4), (20, 3, 5), (17, 3, 3), (12, 3, 3), +-- Student 4 (Noah Davis) - Data Science major +(1, 4, 5), (11, 4, 5), (12, 4, 5), (37, 4, 4), (38, 4, 4), (6, 4, 4), (17, 4, 4), (19, 4, 5), (20, 4, 5), +-- Student 5 (Olivia Miller) - Marketing major +(13, 5, 4), (14, 5, 5), (17, 5, 5), (18, 5, 4), (19, 5, 4), (20, 5, 5), (12, 5, 3); + +-- 7. Creates Position relationships (bridge table - references users and coopPositions) +INSERT INTO createsPos (employerId, coopPositionId) VALUES +(37, 1), (37, 2), (37, 3), (37, 4), (37, 5), (37, 6), (37, 7), (37, 8), (37, 9), (37, 10), +(38, 11), (38, 12), (38, 13), (38, 14), (38, 15), (38, 16), (38, 17), (38, 18), (38, 19), (38, 20), +(39, 21), (39, 22), (39, 23), (39, 24), (39, 25), (39, 26), (39, 27), (39, 28), (39, 29), (39, 30), +(40, 31), (40, 32), (40, 33), (40, 34), (40, 35), (40, 36), (40, 37), (40, 38), (40, 39), (40, 40), +(41, 41), (41, 42), (41, 43), (41, 44), (41, 45), (41, 46), (41, 47), (41, 48), (41, 49), (41, 50); + +-- 7. 
-- Advisor-Advisee relationships (bridge table - references users)
INSERT INTO advisor_advisee (advisorId, studentId) VALUES
-- Sarah Martinez (advisor 31) advises students 1-25 (expanded for comprehensive analytics)
(31, 1), (31, 2), (31, 3), (31, 4), (31, 5), (31, 6), (31, 7), (31, 8), (31, 9), (31, 10),
(31, 11), (31, 12), (31, 13), (31, 14), (31, 15), (31, 16), (31, 17), (31, 18), (31, 19), (31, 20),
(31, 21), (31, 22), (31, 23), (31, 24), (31, 25),
-- Michael Chen (advisor 32) advises students 26-30 (reduced set)
(32, 26), (32, 27), (32, 28), (32, 29), (32, 30);
-- NOTE(review): removed former rows (33, 31)..(33, 35). userIds 31-35 are
-- advisors (see the users seed: "Advisors (userId 31-36)"), not students, so
-- they must not appear in the studentId column of this bridge table.
-- Advisors 33-36 simply have no advisees in this seed data.

-- 8. Applies To App relationships (bridge table - references applications and users)
INSERT INTO appliesToApp (applicationId, studentId) VALUES
(1, 1), (2, 1), (3, 2), (4, 2), (5, 3), (6, 3), (7, 4), (8, 4), (9, 5), (10, 5),
-- Additional placement data relationships
(61, 1), (62, 2), (63, 3), (64, 4), (65, 5), (66, 6),
-- Comprehensive application relationships for expanded dataset
(67, 6), (68, 6), (69, 7), (70, 7), (71, 8), (72, 8), (73, 9), (74, 9), (75, 10), (76, 10),
(77, 11), (78, 11), (79, 11), (80, 12), (81, 12), (82, 13), (83, 13), (84, 14), (85, 14),
(86, 15), (87, 15), (88, 16), (89, 16), (90, 17), (91, 17), (92, 18), (93, 18), (94, 19), (95, 19),
(96, 20), (97, 20), (98, 20), (99, 21), (100, 21), (101, 22), (102, 22), (103, 23), (104, 23), (105, 23),
(106, 24), (107, 24), (108, 25), (109, 25);

-- 9.
Past Co-op Experience (workedAtPos table - completed co-op positions) +INSERT INTO workedAtPos (studentId, coopPositionId, startDate, endDate, companyRating) VALUES +-- Original entries (keep existing) +(1, 2, '2024-01-15', '2024-06-15', 5), +(2, 7, '2024-03-01', '2024-08-31', 4), +(3, 19, '2023-09-01', '2024-02-29', 5), +(4, 12, '2024-01-01', '2024-06-30', 4), +(5, 8, '2023-06-01', '2023-12-31', 3), + +-- Additional entries for more comprehensive data + +-- Mason Wilson (userId=6) - Cybersecurity major, completed security co-op +(6, 4, '2023-09-01', '2024-02-29', 4), + +-- Ava Moore (userId=7) - Biomedical Engineering, completed research co-op +(7, 7, '2023-06-01', '2023-12-31', 5), + +-- Ethan Taylor (userId=8) - Finance major, completed financial analyst co-op +(8, 5, '2024-01-15', '2024-06-15', 4), + +-- Isabella Anderson (userId=9) - Psychology major, completed HR analytics co-op +(9, 42, '2023-06-01', '2023-12-31', 3), + +-- James Thomas (userId=10) - Mechanical Engineering, completed manufacturing co-op +(10, 43, '2024-01-01', '2024-06-30', 5), + +-- Mia Jackson (userId=11) - CS Senior, completed multiple co-ops +(11, 1, '2023-01-15', '2023-06-15', 4), -- First co-op - Software Developer +(11, 16, '2023-09-01', '2024-02-29', 5), -- Second co-op - Full Stack Developer + +-- Lucas White (userId=12) - Business major with Data Science minor +(12, 10, '2023-06-01', '2023-12-31', 4), -- Business Analyst co-op + +-- Charlotte Harris (userId=13) - Environmental Engineering +(13, 8, '2024-01-15', '2024-06-15', 5), -- Environmental Engineer co-op + +-- Benjamin Martin (userId=14) - Information Systems Senior +(14, 26, '2023-01-15', '2023-06-15', 3), -- Systems Administrator co-op +(14, 35, '2023-09-01', '2024-02-29', 4), -- Database Administrator co-op + +-- Amelia Garcia (userId=15) - Physics major +(15, 25, '2023-06-01', '2023-12-31', 4), -- Research Assistant co-op + +-- Henry Rodriguez (userId=16) - CS Sophomore (first co-op) +(16, 17, '2024-01-01', '2024-06-30', 
3), -- QA co-op (entry level) + +-- Harper Lewis (userId=17) - Design Senior +(17, 6, '2023-01-15', '2023-06-15', 5), -- UX Design co-op +(17, 48, '2023-09-01', '2024-02-29', 4), -- Product Design co-op + +-- Alexander Lee (userId=18) - Electrical Engineering +(18, 18, '2023-06-01', '2023-12-31', 4), -- Robotics Engineer co-op + +-- Evelyn Walker (userId=19) - International Business +(19, 50, '2024-01-15', '2024-06-15', 4), -- Business Development co-op + +-- Sebastian Hall (userId=20) - Data Science Senior +(20, 2, '2023-01-15', '2023-06-15', 5), -- Data Analyst co-op +(20, 11, '2023-09-01', '2024-02-29', 5), -- Machine Learning co-op + +-- Aria Allen (userId=21) - Marketing major +(21, 3, '2023-06-01', '2023-12-31', 3), -- Marketing Assistant co-op +(21, 19, '2024-01-01', '2024-06-30', 4), -- Digital Marketing co-op + +-- Owen Young (userId=22) - CS Sophomore (first co-op) +(22, 28, '2024-01-15', '2024-06-15', 4), -- Software QA Engineer + +-- Luna King (userId=23) - Business Senior with Finance focus +(23, 27, '2023-01-15', '2023-06-15', 4), -- Finance Intern +(23, 23, '2023-09-01', '2024-02-29', 5), -- Business Intelligence + +-- Grayson Wright (userId=24) - Cybersecurity major +(24, 32, '2023-06-01', '2023-12-31', 4), -- Security Analyst co-op + +-- Chloe Lopez (userId=25) - Biology major +(25, 15, '2024-01-01', '2024-06-30', 5), -- Healthcare Data Analyst + +-- Carter Hill (userId=26) - Information Systems Senior +(26, 41, '2023-01-15', '2023-06-15', 3), -- Network Engineer co-op + +-- Zoey Scott (userId=27) - Environmental Engineering +(27, 33, '2023-06-01', '2023-12-31', 4), -- Operations Analyst co-op + +-- Luke Green (userId=28) - Chemistry major +(28, 25, '2024-01-15', '2024-06-15', 4), -- Research Assistant co-op + +-- Lily Adams (userId=29) - Design Senior +(29, 44, '2023-01-15', '2023-06-15', 3), -- Content Creator co-op + +-- Jack Baker (userId=30) - CS Junior +(30, 24, '2023-09-01', '2024-02-29', 4), -- Frontend Developer co-op + +-- Additional 
-- second co-ops for some students to show progression

-- Charlie Stout (userId=1) - second co-op after first successful one
(1, 22, '2024-09-01', '2025-02-28', NULL), -- Backend Developer (current/recent)

-- Sophia Brown (userId=3) - second engineering co-op
(3, 45, '2024-06-01', '2024-11-30', NULL), -- Automation Engineer (current/recent)

-- Noah Davis (userId=4) - advanced data science co-op
(4, 40, '2024-09-01', '2025-02-28', NULL), -- AI Research Intern (current/recent)

-- Mia Jackson (userId=11) - third co-op as senior
(11, 39, '2024-06-01', '2024-11-30', NULL), -- Blockchain Developer (current/recent)

-- Sebastian Hall (userId=20) - third advanced co-op
(20, 47, '2024-06-01', '2024-11-30', NULL), -- Computer Vision co-op (current/recent)

-- Luna King (userId=23) - advanced finance role
(23, 39, '2024-06-01', '2024-11-30', NULL); -- Blockchain Developer (current/recent)

-- Worked At Position relationships with company ratings (bridge table - references users and coopPositions)
-- NOTE(review): this second workedAtPos INSERT previously re-inserted nine
-- (studentId, coopPositionId) pairs already present in the INSERT above —
-- (6,4), (7,7), (8,5), (11,16), (12,10), (13,8), (17,6), (20,2), (21,19) —
-- each with a conflicting date range. That would violate any composite
-- primary/unique key on (studentId, coopPositionId) and record two
-- contradictory histories for the same placement, so those rows are dropped
-- here; only the non-conflicting placements remain.
INSERT INTO workedAtPos (studentId, coopPositionId, startDate, endDate, companyRating) VALUES
(1, 1, '2024-06-01', '2024-12-01', 4),   -- Charlie Stout at TechNova Inc
(2, 5, '2024-06-01', '2024-12-01', 5),   -- Liam Williams at FinanceFirst Corp
(3, 18, '2024-06-01', '2024-12-01', 4),  -- Sophia Brown at AutoMech Industries
(4, 11, '2024-06-01', '2024-12-01', 5),  -- Noah Davis at DataFlow Analytics
(5, 3, '2024-06-01', '2024-12-01', 3),   -- Olivia Miller at TechNova Inc
(9, 3, '2024-06-01', '2024-12-01', 4),   -- Isabella Anderson at TechNova Inc
(10, 18, '2024-06-01', '2024-12-01', 5), -- James Thomas at AutoMech Industries
(14, 2, '2024-06-01', '2024-12-01', 5),  -- Benjamin Martin at TechNova Inc
(15, 15, '2024-06-01', '2024-12-01', 4), -- Amelia Garcia at BioResearch Labs
(16, 1, '2024-06-01', '2024-12-01', 3),  -- Henry Rodriguez at TechNova Inc
(18, 9, '2024-06-01', '2024-12-01', 5),  -- Alexander Lee at TechNova Inc
(19, 10, '2024-06-01', '2024-12-01', 3), -- Evelyn Walker at TechNova Inc
(22, 1, '2024-06-01', '2024-12-01', 3),  -- Owen Young at TechNova Inc
(23, 5, '2024-06-01', '2024-12-01', 4),  -- Luna King at FinanceFirst Corp
(24, 4, '2024-06-01', '2024-12-01', 5),  -- Grayson Wright at TechNova Inc
(25, 7, '2024-06-01', '2024-12-01', 3),  -- Chloe Lopez at BioResearch Labs
(26, 2, '2024-06-01', '2024-12-01', 4),  -- Carter Hill at TechNova Inc
(27, 8, '2024-06-01', '2024-12-01', 5),  -- Zoey Scott at GreenTech Solutions
(28, 7, '2024-06-01', '2024-12-01', 4),  -- Luke Green at BioResearch Labs
(29, 6, '2024-06-01', '2024-12-01', 3),  -- Lily Adams at TechNova Inc
(30, 1, '2024-06-01', '2024-12-01', 4);  -- Jack Baker at TechNova Inc