migrate dbt.exceptions to dbt_common #160

Open · wants to merge 2 commits into base: main
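The PR description does not spell out the mapping, so here is a minimal before/after sketch of how the old `dbt.exceptions` calls translate to `dbt_common.exceptions`, using only the class names visible in the diff below; the helper function names are hypothetical and not part of the adapter.

```python
"""Minimal sketch of the exception migration in this PR.

Mapping, as read from the diff:
    dbt.exceptions.DbtRuntimeError           -> dbt_common.exceptions.DbtRuntimeError
    dbt.exceptions.DbtProfileError           -> dbt_common.exceptions.DbtConfigError
    dbt.exceptions.raise_compiler_error(...) -> raise dbt_common.exceptions.CompilationError(...)
"""

from typing import Optional

from dbt_common.exceptions import CompilationError, DbtConfigError, DbtRuntimeError


def reject_auth_type(auth_type: Optional[str]) -> None:
    # was: raise dbt.exceptions.DbtProfileError(...)
    raise DbtConfigError(f"Invalid auth_type {auth_type} provided")


def reject_null_schema(name: str) -> None:
    # was: dbt.exceptions.raise_compiler_error(...); note the explicit `raise` now required
    raise CompilationError(f"Attempted to cache a null schema for {name}")


def wrap_driver_error(exc: Exception) -> None:
    # was: raise dbt.exceptions.DbtRuntimeError(...)
    raise DbtRuntimeError(f"Unable to establish connection to Hive server: {exc}")
```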
14 changes: 7 additions & 7 deletions dbt/adapters/hive/connections.py
@@ -19,7 +19,6 @@
from typing import Any, Optional, Tuple
from multiprocessing.context import SpawnContext

import dbt.exceptions
import impala.dbapi
from dbt.adapters.contracts.connection import Credentials
from dbt.adapters.sql import SQLConnectionManager
@@ -30,6 +29,7 @@
ConnectionState,
)
from dbt.adapters.events.logging import AdapterLogger
from dbt_common.exceptions import DbtConfigError, DbtRuntimeError
from dbt_common.events.functions import fire_event
from dbt.adapters.events.types import ConnectionUsed, SQLQuery, SQLQueryStatus
from dbt.utils import DECIMALS
@@ -80,7 +80,7 @@ def __pre_deserialize__(cls, data):
def __post_init__(self):
# hive classifies database and schema as the same thing
if self.database is not None and self.database != self.schema:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f" schema: {self.schema} \n"
f" database: {self.database} \n"
f"On Hive, database must be omitted or have the same value as"
@@ -232,7 +232,7 @@ def open(cls, connection):
ca_cert=credentials.ca_cert,
)
else:
raise dbt.exceptions.DbtProfileError(
raise DbtConfigError(
f"Invalid auth_type {credentials.auth_type} provided"
)
connection_end_time = time.time()
@@ -272,20 +272,20 @@ def exception_handler(self, sql: str):
yield
except HttpError as httpError:
logger.debug(f"Authorization error: {httpError}")
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
"HTTP Authorization error: " + str(httpError) + ", please check your credentials"
)
except HiveServer2Error as hiveError:
logger.debug(f"Server connection error: {hiveError}")
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
"Unable to establish connection to Hive server: " + str(hiveError)
)
except Exception as exc:
logger.debug(f"Error while running:\n{sql}")
logger.debug(exc)
if len(exc.args) == 0:
raise
raise dbt.exceptions.DbtRuntimeError(str(exc))
raise DbtRuntimeError(str(exc))

def cancel(self, connection):
connection.handle.cancel()
@@ -466,7 +466,7 @@ def validate_creds(cls, creds, required):

for key in required:
if not hasattr(creds, key):
raise dbt.exceptions.DbtProfileError(
raise DbtConfigError(
"The config '{}' is required when using the {} method"
" to connect to Hive".format(key, method)
)
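For context on the `exception_handler` hunk above: it is a context manager that converts impyla driver errors into `DbtRuntimeError` so dbt reports them uniformly. A condensed sketch of the post-migration shape follows; the `impala.error` import path for the driver exceptions is an assumption, since the original import lines sit outside the diff.

```python
from contextlib import contextmanager

from dbt_common.exceptions import DbtRuntimeError

# Assumed import path for the impyla driver exceptions; the file's actual
# import lines are not shown in the hunks above.
from impala.error import HiveServer2Error, HttpError


@contextmanager
def exception_handler(sql: str):
    """Condensed sketch: re-raise driver errors as DbtRuntimeError, and keep
    the bare re-raise for exceptions that carry no arguments."""
    try:
        yield
    except HttpError as http_error:
        raise DbtRuntimeError(f"HTTP Authorization error: {http_error}, please check your credentials")
    except HiveServer2Error as hive_error:
        raise DbtRuntimeError(f"Unable to establish connection to Hive server: {hive_error}")
    except Exception as exc:
        if len(exc.args) == 0:
            raise
        raise DbtRuntimeError(str(exc))
```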
13 changes: 7 additions & 6 deletions dbt/adapters/hive/impl.py
@@ -18,7 +18,7 @@
import agate

import dbt
import dbt.exceptions
#import dbt.exceptions

from dbt.adapters.base import AdapterConfig
from dbt.adapters.base.impl import catch_as_completed
@@ -33,6 +33,7 @@
from dbt_common.utils import executor

from dbt_common.clients import agate_helper
from dbt_common.exceptions import CompilationError, DbtRuntimeError

from dbt.adapters.events.logging import AdapterLogger

@@ -105,7 +106,7 @@ def add_schema_to_cache(self, schema) -> str:
"""Cache a new schema in dbt. It will show up in `list relations`."""
if schema is None:
name = self.nice_connection_name()
dbt.exceptions.raise_compiler_error(f"Attempted to cache a null schema for {name}")
CompilationError(f"Attempted to cache a null schema for {name}")
if dbt.flags.USE_CACHE:
self.cache.add_schema(None, schema)
# so jinja doesn't render things
@@ -133,7 +134,7 @@ def list_relations_without_caching(self, schema_relation: HiveRelation) -> List[
try:
result_tables = self.execute_macro("hive__list_tables_without_caching", kwargs=kwargs)
result_views = self.execute_macro("hive__list_views_without_caching", kwargs=kwargs)
except dbt.exceptions.DbtRuntimeError as e:
except DbtRuntimeError as e:
errmsg = getattr(e, "msg", "")
if f"Database '{schema_relation}' not found" in errmsg:
return []
@@ -313,7 +314,7 @@ def get_columns_in_relation(self, relation: Relation) -> List[HiveColumn]:
try:
rows: List[agate.Row] = super().get_columns_in_relation(relation)
columns = self.parse_describe_formatted(relation, rows)
except dbt.exceptions.DbtRuntimeError as e:
except DbtRuntimeError as e:
# impala would throw error when table doesn't exist
errmsg = getattr(e, "msg", "")
if (
@@ -354,7 +355,7 @@ def get_catalog(
"""Return a catalogs that contains information of all schemas"""
schema_map = self._get_catalog_schemas(relation_configs)
if len(schema_map) > 1:
dbt.exceptions.raise_compiler_error(
raise CompilationError(
f"Expected only one database in get_catalog, found " f"{list(schema_map)}"
)

@@ -391,7 +392,7 @@ def _get_one_catalog(self, information_schema, schemas) -> agate.Table:
threadself.get_columns_in_relation
"""
if len(schemas) != 1:
dbt.exceptions.raise_compiler_error(
raise CompilationError(
f"Expected only one schema in Hive _get_one_catalog, found " f"{schemas}"
)

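On the catch side, the impl.py hunks keep relying on `DbtRuntimeError` exposing a `msg` attribute, for example in `list_relations_without_caching`, where a missing database is treated as "no relations" rather than an error. A condensed, hypothetical sketch of that pattern with the new import (the kwargs shape is assumed, since the original call's arguments are outside the hunk):

```python
from typing import Any, List

from dbt_common.exceptions import DbtRuntimeError


def list_tables_or_empty(schema_relation: Any, execute_macro) -> List[Any]:
    """Condensed sketch of the catch-side pattern in list_relations_without_caching:
    a 'Database not found' DbtRuntimeError simply means there is nothing to list."""
    try:
        # Macro name comes from the diff; the kwargs shape is an assumption.
        return execute_macro("hive__list_tables_without_caching", kwargs={"relation": schema_relation})
    except DbtRuntimeError as exc:
        errmsg = getattr(exc, "msg", "")
        if f"Database '{schema_relation}' not found" in errmsg:
            return []
        raise
```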