Sqlite setup #65

Merged
merged 9 commits into from
Sep 15, 2023
4 changes: 4 additions & 0 deletions .gitignore
@@ -16,6 +16,10 @@ mysql/sql/01_data.sql
# django
django/**/migrations/**
django/university/models.py
django/statistics.sql

# celery
django/celerybeat-schedule

# stats related graphs
django/university/stats_graphs/*
4 changes: 3 additions & 1 deletion django/entrypoint.sh
@@ -25,7 +25,9 @@ python manage.py inspectdb > university/models.py
python manage.py makemigrations
python manage.py migrate university --fake


# Start the Celery worker (using Redis as the broker) and the Celery beat scheduler in the background
celery -A tasks worker --loglevel=INFO &
celery -A tasks beat &

# Initializes the API.
exec $cmd
2 changes: 0 additions & 2 deletions django/manage.py
@@ -3,7 +3,6 @@
import os
import sys


def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tts_be.settings')
@@ -17,6 +16,5 @@ def main():
) from exc
execute_from_command_line(sys.argv)


if __name__ == '__main__':
main()
2 changes: 2 additions & 0 deletions django/requirements.txt
@@ -6,3 +6,5 @@ djangorestframework==3.11.0
pytz==2021.3
sqlparse==0.4.2
mysqlclient==1.4.6
celery==5.2.7
redis==3.5.3
23 changes: 23 additions & 0 deletions django/tasks.py
@@ -0,0 +1,23 @@
from celery import Celery
from celery.schedules import crontab
import os

app = Celery('tasks', broker="redis://tts-be-redis_service-1:6379")

# Called after Celery finishes its own setup. Registers a periodic task so the
# beat scheduler runs dump_statistics at midnight and at noon every day.
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(
        crontab(minute='0', hour='0, 12'),
        dump_statistics.s(),
        name='dump statistics'
    )

@app.task
def dump_statistics():
    command = "mysqldump -P {} -h db -u {} -p{} {} statistics > statistics.sql".format(
        os.environ["MYSQL_PORT"],
        os.environ["MYSQL_USER"],
        os.environ["MYSQL_PASSWORD"],
        os.environ["MYSQL_DATABASE"])
    os.system(command)
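
For local testing, the dump can be queued once by hand instead of waiting for the beat schedule. A minimal sketch, assuming the Redis broker and a worker started by entrypoint.sh are already running (illustrative only, not part of this PR):

# Hypothetical manual trigger from a shell inside the django container.
from tasks import dump_statistics

# Enqueue one run; the running worker executes the mysqldump command.
dump_statistics.delay()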
89 changes: 0 additions & 89 deletions django/university/stats.py

This file was deleted.

35 changes: 18 additions & 17 deletions django/university/views.py
@@ -6,21 +6,17 @@
from university.models import Professor
from university.models import ScheduleProfessor
from university.models import CourseMetadata
from university.models import Statistics
from django.http import JsonResponse
from django.core import serializers
from rest_framework.decorators import api_view
from django.db.models import Max
from university.stats import statistics, cache_statistics
from django.db import transaction
import json
import os
from django.utils import timezone
# Create your views here.

"""
Initialization of statistics.
"""

DEFAULT_YEAR = 2023
statistics(Course.objects.filter(year=DEFAULT_YEAR).values(), DEFAULT_YEAR)

def get_field(value):
return value.field
@@ -56,10 +52,18 @@ def course_units(request, course_id, year, semester):
        course_units.__dict__.update(course_units.course_unit.__dict__)
        del course_units.__dict__["_state"]
        json_data.append(course_units.__dict__)

    stats = statistics.get_instance()
    if stats != None:
        stats.increment_requests_stats(id=course_id)

    course = Course.objects.get(id = course_id)

    with transaction.atomic():
        statistics, created = Statistics.objects.select_for_update().get_or_create(
            course_unit_id = course_id,
            acronym = course.acronym,
            defaults = {"visited_times": 0, "last_updated": timezone.now()},
        )
        statistics.visited_times += 1
        statistics.last_updated = timezone.now()
        statistics.save()

    return JsonResponse(json_data, safe=False)

@@ -119,13 +123,10 @@ def data(request):
    name = request.GET.get('name')
    password = request.GET.get('password')
    if name == os.environ['STATISTICS_NAME'] and password == os.environ['STATISTICS_PASS']:
        stats = statistics.get_instance()
        if stats != None:
            json_data = stats.export_request_stats(Course.objects.filter(year=stats.get_year()).values())
            cache_statistics()
            return HttpResponse(json.dumps(json_data), content_type='application/json')
        json_data = serializers.serialize("json", Statistics.objects.all())
        return HttpResponse(json_data, content_type='application/json')
    else:
        return HttpResponse(status=401)
    return HttpResponse(status=401)

"""
Returns all the professors of a class of the schedule id
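The counter update in course_units could also be written with an F() expression, which lets the database perform the increment inside the UPDATE itself instead of taking an explicit row lock. A sketch only, not what this PR implements (record_visit is a hypothetical helper):

from django.db.models import F
from django.utils import timezone
from university.models import Statistics

def record_visit(course_id, acronym):
    # Ensure the row exists, then increment atomically at the database level.
    Statistics.objects.get_or_create(
        course_unit_id=course_id,
        defaults={"acronym": acronym, "visited_times": 0, "last_updated": timezone.now()},
    )
    Statistics.objects.filter(course_unit_id=course_id).update(
        visited_times=F("visited_times") + 1,
        last_updated=timezone.now(),
    )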
9 changes: 5 additions & 4 deletions docker-compose.yaml
@@ -40,7 +40,8 @@ services:
    volumes:
      - ./fetcher/:/fetcher/

  redis_service:
    image: redis:6.2-bullseye
    restart: always
    ports:
      - '6379:6379'
19 changes: 18 additions & 1 deletion mysql/sql/00_schema_mysql.sql
@@ -118,6 +118,17 @@ CREATE TABLE `professor` (
) ENGINE=InnoDB CHARSET = utf8 COLLATE = utf8_general_ci;


-- --------------------------------------------------------
--
-- Table structure for table `statistics`
--

CREATE TABLE `statistics` (
  `course_unit_id` int(11) NOT NULL,
  `acronym` varchar(10) NOT NULL,
  `visited_times` int(11) NOT NULL,
  `last_updated` datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;


-- Add primary keys
alter TABLE faculty ADD PRIMARY KEY (`acronym`);
@@ -135,6 +146,8 @@ alter TABLE course_metadata ADD FOREIGN KEY (`course_id`) REFERENCES `course`(`i
alter TABLE schedule ADD PRIMARY KEY (`id`);
alter TABLE schedule ADD FOREIGN KEY (`course_unit_id`) REFERENCES `course_unit`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;

alter TABLE statistics ADD PRIMARY KEY (`course_unit_id`);

alter TABLE professor ADD PRIMARY KEY (`sigarra_id`);

alter TABLE schedule_professor ADD PRIMARY KEY (`schedule_id`, `professor_sigarra_id`);
@@ -163,8 +176,12 @@ CREATE UNIQUE INDEX `faculty_acronym` ON `faculty`(`acronym`);
--
CREATE INDEX `schedule_course_unit_id` ON `schedule`(`course_unit_id`);

--
-- Indexes for table `statistics`
--
CREATE INDEX `statistics` ON `statistics`(`course_unit_id`);

--
-- Indexes for table `course_metadata`
--
CREATE INDEX `course_metadata_index` ON `course_metadata`(`course_id`, `course_unit_id`, `course_unit_year`);
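
Since django/university/models.py is generated by inspectdb in entrypoint.sh and is gitignored, the Statistics model imported in views.py is never committed. An approximate sketch of what inspectdb would emit for the new table (field types inferred from the schema above, not taken from the repository):

from django.db import models

class Statistics(models.Model):
    # Mirrors the `statistics` table added in 00_schema_mysql.sql.
    course_unit_id = models.IntegerField(primary_key=True)
    acronym = models.CharField(max_length=10)
    visited_times = models.IntegerField()
    last_updated = models.DateTimeField()

    class Meta:
        managed = False        # inspectdb marks pre-existing tables as unmanaged
        db_table = 'statistics'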