Commit

fix: quality gates
Furkan Cetin committed Oct 18, 2024
1 parent 27c1ac4 commit 9ca67e2
Showing 12 changed files with 21 additions and 25 deletions.
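The change is the same in every file: imports are regrouped and reordered so the repository's quality gates (import-order linting) pass. A minimal sketch of checking the ordering locally, assuming the gate includes an isort-style import-order check — the lint configuration itself is not part of this commit:

# Hypothetical local check, assuming the quality gate runs an isort-style
# import-order lint; the actual CI configuration is not shown in this diff.
import isort

# check_file() returns True when a file's imports already match the
# configured ordering and prints the offending diff otherwise.
for path in (
    "databricks/lib/repository/ground_truth/helpers.py",
    "databricks/notebooks/store_statistics.py",
):
    ok = isort.check_file(path, show_diff=True)
    print(f"{path}: {'ok' if ok else 'needs reordering'}")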
6 changes: 3 additions & 3 deletions databricks/lib/repository/ground_truth/helpers.py
@@ -1,13 +1,13 @@
 from typing import Any, Dict, Optional
 
-from databricks.sdk.runtime import spark
-from pyspark.sql import functions as F
-
 from lib.repository.ground_truth.models import Revision
 from lib.spark_helper.ground_truth import (
     GroundTruthDBStorage,
     GroundTruthFileStorage,
 )
+from pyspark.sql import functions as F
+
+from databricks.sdk.runtime import spark
 
 
 class GroundTruthHelper:
5 changes: 2 additions & 3 deletions databricks/lib/repository/ground_truth/stats.py
@@ -1,11 +1,10 @@
 from typing import Any, Dict, List, Optional, Sequence, Tuple
 
-from sklearn.metrics import accuracy_score, precision_score, recall_score
-from tabulate import tabulate
-
 import lib.spark_helper.predictions as predictions_helper
 from lib.spark_helper.ground_truth import GroundTruthFileStorage
 from lib.spark_helper.predictions import Prediction
+from sklearn.metrics import accuracy_score, precision_score, recall_score
+from tabulate import tabulate
 
 
 def replace_values(
3 changes: 1 addition & 2 deletions databricks/lib/spark_helper/files.py
@@ -1,9 +1,8 @@
 from io import BytesIO
 from typing import Any, Dict
 
-from pypdf import PdfReader
-
 from lib.spark_helper.storage_service import SparkStorageService
+from pypdf import PdfReader
 
 
 def extract_text_from_pdf(pdf_content: bytes) -> str:
4 changes: 2 additions & 2 deletions databricks/notebooks/calculate_statistics.py
@@ -1,14 +1,14 @@
 # Databricks notebook source
 import json
 
-from databricks.sdk.runtime import dbutils
-
 from lib.repository.configs.service import load_config
 from lib.repository.ground_truth.stats import StatsCalculator
 from lib.spark_helper.ground_truth import GroundTruthFileStorage
 from lib.spark_helper.predictions import TemporaryStorage
 from lib.spark_helper.storage_service import SparkStorageService
 
+from databricks.sdk.runtime import dbutils
+
 job_ids = json.loads(dbutils.widgets.get("job_ids"))
 configs = load_config(project_name=dbutils.widgets.get("project_name"))
 storage_service = SparkStorageService(configs)
4 changes: 2 additions & 2 deletions databricks/notebooks/commit_annotations.py
@@ -3,13 +3,13 @@
 import random
 from typing import Any, Dict, Generator, List, Tuple
 
-from databricks.sdk.runtime import dbutils
-
 import lib.spark_helper.predictions as predictions_helper
 from lib.badgerdoc.service import BadgerDocService
 from lib.repository.configs.service import load_config
 from lib.spark_helper.storage_service import SparkStorageService
 
+from databricks.sdk.runtime import dbutils
+
 configs = load_config(project_name=dbutils.widgets.get("project_name"))
 
 storage_service = SparkStorageService(configs)
4 changes: 2 additions & 2 deletions databricks/notebooks/finish_job.py
@@ -2,10 +2,10 @@
 import json
 from typing import Any
 
-from databricks.sdk.runtime import dbutils
-
 from lib.badgerdoc.service import BadgerDocService
 
+from databricks.sdk.runtime import dbutils
+
 secrets_scope = dbutils.widgets.get("secrets_scope")
 
 badgerdoc = BadgerDocService(
4 changes: 2 additions & 2 deletions databricks/notebooks/start_job.py
@@ -1,10 +1,10 @@
 # Databricks notebook source
 import json
 
-from databricks.sdk.runtime import dbutils
-
 from lib.badgerdoc.service import BadgerDocService
 
+from databricks.sdk.runtime import dbutils
+
 secrets_scope = dbutils.widgets.get("secrets_scope")
 
 badgerdoc = BadgerDocService(
4 changes: 2 additions & 2 deletions databricks/notebooks/store_files.py
@@ -2,11 +2,11 @@
 import json
 
 import requests
-from databricks.sdk.runtime import dbutils
-
 from lib.repository.configs.service import load_config
 from lib.spark_helper.files import FilesStorage
 
+from databricks.sdk.runtime import dbutils
+
 configs = load_config(project_name=dbutils.widgets.get("project_name"))
 files_storage = FilesStorage(configs)
 
4 changes: 2 additions & 2 deletions databricks/notebooks/store_ground_truth.py
@@ -1,13 +1,13 @@
 # Databricks notebook source
 import json
 
-from databricks.sdk.runtime import dbutils
-
 from lib.badgerdoc.service import BadgerDocService
 from lib.repository.configs.service import load_config
 from lib.repository.ground_truth.helpers import GroundTruthHelper
 from lib.repository.ground_truth.revision_factory import RevisionFactory
 
+from databricks.sdk.runtime import dbutils
+
 job_parameters = json.loads(dbutils.widgets.get("badgerdoc_job_parameters"))
 tenant = job_parameters["tenant"]
 revisions_dict = job_parameters["files_data"]
4 changes: 2 additions & 2 deletions databricks/notebooks/store_statistics.py
@@ -1,13 +1,13 @@
 # Databricks notebook source
 import json
 
-from databricks.sdk.runtime import dbutils
-
 import lib.spark_helper.predictions as predictions_helper
 from lib.repository.configs.service import load_config
 from lib.spark_helper.db_service import SparkDBService
 from lib.spark_helper.storage_service import SparkStorageService
 
+from databricks.sdk.runtime import dbutils
+
 configs = load_config(project_name=dbutils.widgets.get("project_name"))
 db_service = SparkDBService(configs)
 storage_service = SparkStorageService(configs)
3 changes: 1 addition & 2 deletions jobs/jobs/databricks_utils.py
@@ -5,9 +5,8 @@
 from datetime import datetime
 from typing import Iterator, List
 
-from databricks.sdk import WorkspaceClient
-
 import jobs.pipeline as pipeline
+from databricks.sdk import WorkspaceClient
 from jobs.schemas import Pipeline
 
 logger = logging.getLogger(__name__)
1 change: 0 additions & 1 deletion pipelines/pipelines/databricks_utils.py
@@ -3,7 +3,6 @@
 from datetime import datetime
 
 from databricks.sdk import WorkspaceClient
-
 from pipelines.schemas import PipelineOut
 
 logger = logging.getLogger(__name__)
