Skip to content

Commit

Permalink
SW-4085 only save usage data (#1835)
Browse files Browse the repository at this point in the history
* only save usage data
* include iobeam bug fix
* fix pcf malfunction
  • Loading branch information
Josef-MrBeam authored Nov 3, 2023
1 parent 0cbd615 commit 350439a
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 223 deletions.
2 changes: 1 addition & 1 deletion octoprint_mrbeam/dependencies.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
iobeam==1.3.0
iobeam==1.3.1
mrb-hw-info==1.0.0
mrbeam-ledstrips==1.0.0
mrbeamdoc==1.1.0
1 change: 0 additions & 1 deletion octoprint_mrbeam/iobeam/hw_malfunction_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,6 @@ class HwMalfunctionHandler(object):
MALFUNCTION_ID_BOTTOM_OPEN,
MALFUNCTION_ID_LASERHEADUNIT_MISSING,
HW_MANIPULATION,
PCF_ANOMALY,
FAN_NOT_SPINNING,
EXHAUST_HOSE_BLOCKED,
MALFUNCTION_ID_GENERAL,
Expand Down
133 changes: 20 additions & 113 deletions octoprint_mrbeam/migration/Mig006.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,22 @@
import os
import re
from datetime import date
import datetime

import yaml

from octoprint_mrbeam.util.cmd_exec import exec_cmd, exec_cmd_output

from octoprint_mrbeam import mrb_logger
from octoprint_mrbeam.migration.migration_base import (
MigrationBaseClass,
MIGRATION_RESTART,
)


class Mig006FixUsageData(MigrationBaseClass):
class Mig006BackupUsageDataBeforeMigration(MigrationBaseClass):
"""
This migration fix the usage data that was lost during v0.15.0 and 0.15.0post0 updates
    This migration backs up the usage data if it uses the old airfilter structure.
"""

COMMAND_TO_GET_LOGS = 'grep -r "octoprint.plugins.mrbeam.analytics.usage - ERROR - No job time found in {}" /home/pi/.octoprint/logs/'
COMMAND_TO_CHECK_IF_VERSION_WAS_PRESENT = 'grep -a -e "Mr Beam Laser Cutter (0.15.0.post0) = /home/pi/oprint/local/lib/python2.7/site-packages/octoprint_mrbeam" -e "Mr Beam Laser Cutter (0.15.0) = /home/pi/oprint/local/lib/python2.7/site-packages/octoprint_mrbeam" /home/pi/.octoprint/logs/*'
USAGE_DATA_FILE_PATH = "/home/pi/.octoprint/analytics/usage.yaml"
USAGE_DATA_FILE_PATH_BACKUP = "/home/pi/.octoprint/analytics/usage_bak.yaml"

def __init__(self, plugin):
self._backup_usage_data = None
super(Mig006FixUsageData, self).__init__(
plugin, restart=MIGRATION_RESTART.OCTOPRINT
)
super(Mig006BackupUsageDataBeforeMigration, self).__init__(plugin)

@property
def id(self):
Expand All @@ -37,108 +26,26 @@ def id(self):
def shouldrun(cls, beamos_version):
        """Checks if this Migration should run.
overrides the current behaviour as this migration should run if the log file contains the "octoprint.plugins.mrbeam.analytics.usage - ERROR - No job time found in {}, returning 0" error
        overrides the current behaviour, as this migration should run if the usage file includes the old airfilter structure.
"""
command_output, code = exec_cmd_output(
Mig006FixUsageData.COMMAND_TO_CHECK_IF_VERSION_WAS_PRESENT,
log=True,
shell=True,
)
with open(cls.USAGE_DATA_FILE_PATH, "r") as yaml_file:
yaml_data = yaml.safe_load(yaml_file)
if "prefilter" in yaml_data or "carbon_filter" in yaml_data:
return True

if code == 0 and command_output != "":
return True
else:
return False
return False

def _run(self):
self._logger.debug("fix usage data")
found_lines = exec_cmd_output(self.COMMAND_TO_GET_LOGS, log=True, shell=True)
found_lines = str(found_lines).replace("\\'", "'").replace("\\n", "\n")

regex = r"No job time found in {}, returning 0 - {.*'prefilter': {'[^}]*'job_time': (\d+\.\d+)[^}]*}*.+'total': {'[^}]*'job_time': (\d+\.\d+)[^}]*.*'carbon_filter': {'[^}]*'job_time': (\d+\.\d+)[^}]"

match = re.search(regex, found_lines)
self._logger.debug("match: {}".format(match))

if match:
bug_prefilter_job_time = float(match.group(1))
bug_total_time = float(match.group(2))
bug_carbon_filter_job_time = float(match.group(3))
self._logger.info("total_time: {}".format(bug_total_time))
self._logger.info(
"Carbon Filter Job Time: {}".format(bug_carbon_filter_job_time)
)
self._logger.info("Prefilter Job Time: {}".format(bug_prefilter_job_time))

with open(self.USAGE_DATA_FILE_PATH, "r") as yaml_file:
yaml_data = yaml.load(yaml_file)
self._backup_usage_data = yaml_data
if (
float(yaml_data["total"]["job_time"]) - 180000 < bug_total_time
): # only migrate if the working time difference is less than 50 hours
self.exec_cmd(
"sudo mv {file} {file_new}".format(
file=self.USAGE_DATA_FILE_PATH_BACKUP,
file_new=self.USAGE_DATA_FILE_PATH_BACKUP
+ "_"
+ date.today().strftime("%Y_%m_%d"),
)
)
# Update the job_time in the airfilter prefilter
time_since_error = (
float(yaml_data["total"]["job_time"]) - bug_total_time
)
self._logger.info(
"current usage file {} -{}".format(
yaml_data, yaml_data.get("airfilter")
)
)
self._logger.info(
"workingtime since error: {}".format(time_since_error)
)
if "airfilter" in yaml_data:
for airfilter_serial, airfilter_data in yaml_data.get(
"airfilter"
).items():
self._logger.info(
"migrate airfilter {} - {}".format(
airfilter_serial, airfilter_data
)
)
if ("prefilter" or "carbon_filter") in airfilter_data:
yaml_data["airfilter"][airfilter_serial]["prefilter"][
"job_time"
] = (bug_prefilter_job_time + time_since_error)
yaml_data["airfilter"][airfilter_serial]["carbon_filter"][
"job_time"
] = (bug_carbon_filter_job_time + time_since_error)
self._logger.info(
"Data was migrated successfully. {}".format(yaml_data)
)
# pop elements of old air filter structure
if "prefilter" in yaml_data:
yaml_data.pop("prefilter")
if "carbon_filter" in yaml_data:
yaml_data.pop("carbon_filter")

# Save the modified YAML back to the file
with open(self.USAGE_DATA_FILE_PATH, "w") as yaml_file:
yaml.safe_dump(yaml_data, yaml_file, default_flow_style=False)
else:
self._logger.info(
"Data will not be migrated as there was already to many working hours time in between."
)
else:
self._logger.warn(
"Could not find the usage data to recover to in the logs."
self._logger.info("save usage data")
self.exec_cmd(
"sudo cp {file} {file_new}".format(
file=self.USAGE_DATA_FILE_PATH,
file_new=self.USAGE_DATA_FILE_PATH
+ "_"
+ datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"),
)
super(Mig006FixUsageData, self)._run()
)
super(Mig006BackupUsageDataBeforeMigration, self)._run()

def _rollback(self):
if self._backup_usage_data:
with open(self.USAGE_DATA_FILE_PATH, "w") as yaml_file:
yaml.safe_dump(
self._backup_usage_data, yaml_file, default_flow_style=False
)
return True
super(Mig006FixUsageData, self)._rollback()
super(Mig006BackupUsageDataBeforeMigration, self)._rollback()
4 changes: 2 additions & 2 deletions octoprint_mrbeam/migration/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
from octoprint_mrbeam.migration.Mig003 import Mig003EnableLogrotateBuster
from octoprint_mrbeam.migration.Mig004 import Mig004DisableDebugLogging
from octoprint_mrbeam.migration.Mig005 import Mig005InstallNTP
from octoprint_mrbeam.migration.Mig006 import Mig006FixUsageData
from octoprint_mrbeam.migration.Mig006 import Mig006BackupUsageDataBeforeMigration

# To add migrations they have to be added to this list till we automate it
list_of_migrations = [
Expand All @@ -36,5 +36,5 @@
Mig003EnableLogrotateBuster,
Mig004DisableDebugLogging,
Mig005InstallNTP,
Mig006FixUsageData,
Mig006BackupUsageDataBeforeMigration,
]
132 changes: 26 additions & 106 deletions tests/migrations/test_migration_Mig006.py
Original file line number Diff line number Diff line change
@@ -1,48 +1,11 @@
from datetime import date
import datetime

import pytest
from mock.mock import patch, mock_open, call, MagicMock
from mock.mock import patch, mock_open

from octoprint_mrbeam.migration.Mig006 import Mig006FixUsageData
from octoprint_mrbeam.migration.Mig006 import Mig006BackupUsageDataBeforeMigration

OUTPUT_OF_EXEC_CMD = """/home/pi/.octoprint/logs/octoprint.log:2023-10-31 09:28:21,577 - octoprint.plugins.mrbeam.analytics.usage - ERROR - No job time found in {}, returning 0 - {'gantry': {'complete': True, 'job_time': 347.17822551727295}, 'succ_jobs': {'count': 2, 'complete': True}, 'airfilter': {17873: {'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}}}, 'first_write': 1646219798.649828, 'ts': 1698744501.154159, 'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'compressor': {'complete': True, 'job_time': 347.17822551727295}, 'version': '0.15.0.post0', 'laser_head': {'75c8a85e-3c09-4918-befa-408251da5752': {'complete': True, 'job_time': 79.7521630525589}, 'LHS0030322910': {'complete': True, 'job_time': 142.30512607097626}, 'no_serial': {'complete': True, 'job_time': 0.0}, 'LHS0051021128': {'complete': True, 'job_time': 0.0}, '5078e646-0768-4ea2-9f54-61706de1df2c': {'complete': True, 'job_time': 0}}, 'serial': '000000008025FBB0-2R', 'total': {'complete': True, 'job_time': 1721.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}, 'restored': 1}
/home/pi/.octoprint/logs/octoprint.log:2023-10-31 09:28:26,850 - octoprint.plugins.mrbeam.analytics.usage - ERROR - No job time found in {}, returning 0 - {'gantry': {'complete': True, 'job_time': 347.17822551727295}, 'succ_jobs': {'count': 2, 'complete': True}, 'airfilter': {17873: {'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}}}, 'first_write': 1646219798.649828, 'ts': 1698744506.345606, 'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'compressor': {'complete': True, 'job_time': 347.17822551727295}, 'version': '0.15.0.post0', 'laser_head': {'75c8a85e-3c09-4918-befa-408251da5752': {'complete': True, 'job_time': 79.7521630525589}, 'LHS0030322910': {'complete': True, 'job_time': 142.30512607097626}, 'no_serial': {'complete': True, 'job_time': 0.0}, 'LHS0051021128': {'complete': True, 'job_time': 0.0}, '5078e646-0768-4ea2-9f54-61706de1df2c': {'complete': True, 'job_time': 0}}, 'serial': '000000008025FBB0-2R', 'total': {'complete': True, 'job_time': 1721.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}, 'restored': 1}
/home/pi/.octoprint/logs/octoprint.log:2023-10-31 09:28:26,900 - octoprint.plugins.mrbeam.analytics.usage - ERROR - No job time found in {}, returning 0 - {'gantry': {'complete': True, 'job_time': 347.17822551727295}, 'succ_jobs': {'count': 2, 'complete': True}, 'airfilter': {17873: {'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}}}, 'first_write': 1646219798.649828, 'ts': 1698744506.345606, 'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'compressor': {'complete': True, 'job_time': 347.17822551727295}, 'version': '0.15.0.post0', 'laser_head': {'75c8a85e-3c09-4918-befa-408251da5752': {'complete': True, 'job_time': 79.7521630525589}, 'LHS0030322910': {'complete': True, 'job_time': 142.30512607097626}, 'no_serial': {'complete': True, 'job_time': 0.0}, 'LHS0051021128': {'complete': True, 'job_time': 0.0}, '5078e646-0768-4ea2-9f54-61706de1df2c': {'complete': True, 'job_time': 0}}, 'serial': '000000008025FBB0-2R', 'total': {'complete': True, 'job_time': 1721.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}, 'restored': 1}
/home/pi/.octoprint/logs/octoprint.log:2023-10-31 09:28:26,955 - octoprint.plugins.mrbeam.analytics.usage - ERROR - No job time found in {}, returning 0 - {'gantry': {'complete': True, 'job_time': 347.17822551727295}, 'succ_jobs': {'count': 2, 'complete': True}, 'airfilter': {17873: {'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}}}, 'first_write': 1646219798.649828, 'ts': 1698744506.345606, 'prefilter': {'complete': True, 'job_time': 347.17822551727295}, 'compressor': {'complete': True, 'job_time': 347.17822551727295}, 'version': '0.15.0.post0', 'laser_head': {'75c8a85e-3c09-4918-befa-408251da5752': {'complete': True, 'job_time': 79.7521630525589}, 'LHS0030322910': {'complete': True, 'job_time': 142.30512607097626}, 'no_serial': {'complete': True, 'job_time': 0.0}, 'LHS0051021128': {'complete': True, 'job_time': 0.0}, '5078e646-0768-4ea2-9f54-61706de1df2c': {'complete': True, 'job_time': 0}}, 'serial': '000000008025FBB0-2R', 'total': {'complete': True, 'job_time': 1721.17822551727295}, 'carbon_filter': {'complete': True, 'job_time': 347.17822551727295}, 'restored': 1}"""

OUTPUT_OF_EXEC_CMD_EMPTY = ""

PREVIOUS_YAML_FILE = """
carbon_filter:
complete: false
job_time: 889373.3413743973
prefilter:
complete: false
job_time: 889373.3413743973
compressor:
complete: false
job_time: 889373.3413743973
first_write: 1649453028.697643
gantry:
complete: false
job_time: 889373.3413743973
laser_head:
no_serial:
complete: false
job_time: 0.0
restored: 2
serial: 00000000XXXXXX-2Q
succ_jobs:
complete: false
count: 620
total:
complete: false
job_time: 889373.3413743973
ts: 1698678400.341805
version: 0.15.0.post0
"""

BROKEN_YAML_FILE = """airfilter:
YAML_FILE = """airfilter:
60745:
carbon_filter:
complete: false
Expand Down Expand Up @@ -79,7 +42,7 @@
version: 0.15.0.post0
"""

BROKEN_YAML_FILE_TOO_OLD = """airfilter:
YAML_FILE_SHOULD_NOT_RUN = """airfilter:
60745:
carbon_filter:
complete: false
Expand All @@ -105,32 +68,31 @@
count: 620
total:
complete: false
job_time: 181831.3413743973
job_time: 1831.3413743973
ts: 1698678400.341805
version: 0.15.0.post0
"""


@pytest.fixture
def migration006():
return Mig006FixUsageData(None)
return Mig006BackupUsageDataBeforeMigration(None)


@pytest.mark.parametrize(
"command_output,return_code,should_run",
"yaml_file,should_run",
[
(OUTPUT_OF_EXEC_CMD, 0, True),
(OUTPUT_OF_EXEC_CMD_EMPTY, 0, False),
("grep: /home/pi/.octoprint/logs/: No such file or directory", 2, False),
(YAML_FILE, True),
(YAML_FILE_SHOULD_NOT_RUN, False),
],
ids=["command_output", "command_output_empty", "command_error"],
ids=["should_run", "should_not_run"],
)
def test_migration_should_run(command_output, return_code, should_run, migration006):
with patch(
"octoprint_mrbeam.migration.Mig006.exec_cmd_output",
return_value=(command_output, return_code),
):
assert migration006.shouldrun(Mig006FixUsageData, "0.14.0") == should_run
def test_migration_should_run(yaml_file, should_run, migration006):
with patch("__builtin__.open", mock_open(read_data=yaml_file)) as mock_open_func:
assert (
migration006.shouldrun(Mig006BackupUsageDataBeforeMigration, "0.14.0")
== should_run
)


def test_migration_id(migration006):
Expand All @@ -145,55 +107,13 @@ def mock_yaml_safe_dump():

def test_migration_did_run(migration006, mock_yaml_safe_dump, mocker):
mocker.patch.object(migration006, "exec_cmd", autospec=True)
with patch(
"octoprint_mrbeam.migration.Mig006.exec_cmd_output",
return_value=(OUTPUT_OF_EXEC_CMD, 0),
), patch(
"__builtin__.open", mock_open(read_data=BROKEN_YAML_FILE)
) as mock_open_func:

# Act
migration006.run()

# Assert
migration006.exec_cmd.assert_any_call(
"sudo mv /home/pi/.octoprint/analytics/usage_bak.yaml /home/pi/.octoprint/analytics/usage_bak.yaml_{}".format(
date.today().strftime("%Y_%m_%d")
)

# Act
migration006.run()

# Assert
migration006.exec_cmd.assert_any_call(
"sudo cp /home/pi/.octoprint/analytics/usage.yaml /home/pi/.octoprint/analytics/usage.yaml_{}".format(
datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
)
assert mock_yaml_safe_dump.call_args.args[0] == {
"airfilter": {
60745: {
"carbon_filter": {
"complete": False,
"job_time": 457.34137439729983,
},
"prefilter": {"complete": False, "job_time": 457.3413743972999},
}
},
"compressor": {"complete": False, "job_time": 889373.3413743973},
"first_write": 1649453028.697643,
"gantry": {"complete": False, "job_time": 889373.3413743973},
"laser_head": {"no_serial": {"complete": False, "job_time": 0.0}},
"restored": 2,
"serial": "00000000XXXXXX-2Q",
"succ_jobs": {"complete": False, "count": 620},
"total": {"complete": False, "job_time": 1831.3413743973},
"ts": 1698678400.341805,
"version": "0.15.0.post0",
}


def test_migration_too_old(migration006, mock_yaml_safe_dump):
with patch(
"octoprint_mrbeam.migration.Mig006.exec_cmd_output",
return_value=(OUTPUT_OF_EXEC_CMD, 0),
), patch(
"__builtin__.open", mock_open(read_data=BROKEN_YAML_FILE_TOO_OLD)
) as mock_open_func:

# Act
migration006.run()

# Assert
mock_yaml_safe_dump.assert_not_called()
)

0 comments on commit 350439a

Please sign in to comment.