Commit

Logging setup
gouline committed Dec 15, 2023
1 parent f45e6e8 commit 8714ca0
Showing 6 changed files with 89 additions and 121 deletions.
4 changes: 3 additions & 1 deletion dbtmetabase/__init__.py
@@ -4,10 +4,12 @@
from .dbt import DbtReader
from .metabase import MetabaseClient

logger = logging.getLogger(__name__)

__all__ = ["DbtReader", "MetabaseClient"]

try:
    __version__ = importlib.metadata.version("dbt-metabase")
except importlib.metadata.PackageNotFoundError:
    logger.warning("No version found in metadata")
    __version__ = "0.0.0-UNKONWN"
    logging.warning("No version found in metadata")
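
With this change the package reports the missing version through a named logger (logging.getLogger(__name__), i.e. "dbtmetabase") instead of the root logging module, so consuming code can tune its verbosity like any other library logger. A minimal sketch of what that enables, not part of the commit itself:

import logging

# The top-level package logger is named "dbtmetabase"; raising its threshold
# before import also filters the import-time version warning.
logging.getLogger("dbtmetabase").setLevel(logging.ERROR)

import dbtmetabase  # noqa: E402
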
44 changes: 41 additions & 3 deletions dbtmetabase/__main__.py
@@ -1,16 +1,55 @@
import functools
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path
from typing import Callable, Iterable, List, Optional, Union

import click
import yaml
from rich.logging import RichHandler
from typing_extensions import cast

from .dbt import DbtReader
from .logger import logging as package_logger
from .metabase import MetabaseClient

LOG_PATH = Path.home().absolute() / ".dbt-metabase" / "logs" / "dbtmetabase.log"

logger = logging.getLogger(__name__)


def _setup_logger(level: int = logging.INFO):
    """Basic logger configuration for the CLI.
    Args:
        level (int, optional): Logging level. Defaults to logging.INFO.
    """

    LOG_PATH.parent.mkdir(parents=True, exist_ok=True)
    file_handler = RotatingFileHandler(
        filename=LOG_PATH,
        maxBytes=int(1e6),
        backupCount=3,
    )
    file_handler.setFormatter(
        logging.Formatter("%(asctime)s — %(name)s — %(levelname)s — %(message)s")
    )
    file_handler.setLevel(logging.WARNING)

    rich_handler = RichHandler(
        level=level,
        rich_tracebacks=True,
        markup=True,
        show_time=False,
    )

    logging.basicConfig(
        level=level,
        format="%(asctime)s — %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S %z",
        handlers=[file_handler, rich_handler],
        force=True,
    )


def _comma_separated_list_callback(
    ctx: click.Context,
@@ -203,8 +242,7 @@ def wrapper(
        verbose: bool,
        **kwargs,
    ):
        if verbose:
            package_logger.LOGGING_LEVEL = logging.DEBUG
        _setup_logger(level=logging.DEBUG if verbose else logging.INFO)

        return func(
            dbt_reader=DbtReader(
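
The new _setup_logger replaces the package's custom logging module: the RichHandler prints to the console at the level selected by --verbose, while the RotatingFileHandler keeps only WARNING and above in ~/.dbt-metabase/logs/dbtmetabase.log, capped at roughly 1 MB with three backups. A rough sketch of the resulting behaviour, assuming it runs after _setup_logger(level=logging.DEBUG) inside the CLI process:

import logging

logger = logging.getLogger("dbtmetabase.dbt")  # any logger under the package

logger.debug("rendered on the console by RichHandler, filtered out of the log file")
logger.warning("rendered on the console and appended to dbtmetabase.log")
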
39 changes: 20 additions & 19 deletions dbtmetabase/dbt.py
@@ -1,11 +1,12 @@
import dataclasses
import json
import logging
import re
from enum import Enum
from pathlib import Path
from typing import Iterable, List, Mapping, MutableMapping, Optional, Sequence

from .logger.logging import logger
logger = logging.getLogger(__name__)

# Allowed metabase.* fields
_METABASE_COMMON_META_FIELDS = [
@@ -143,17 +144,17 @@ def read_models(
            model_database = node["database"].upper()

            if node["resource_type"] != "model":
                logger().debug("Skipping %s not of resource type model", model_name)
                logger.debug("Skipping %s not of resource type model", model_name)
                continue

            if node["config"]["materialized"] == "ephemeral":
                logger().debug(
                logger.debug(
                    "Skipping ephemeral model %s not manifested in database", model_name
                )
                continue

            if model_database != self.database:
                logger().debug(
                logger.debug(
                    "Skipping %s in database %s, not in target database %s",
                    model_name,
                    model_database,
@@ -162,7 +163,7 @@
                continue

            if self.schema and model_schema != self.schema:
                logger().debug(
                logger.debug(
                    "Skipping %s in schema %s not in target schema %s",
                    model_name,
                    model_schema,
@@ -171,15 +172,15 @@
                continue

            if model_schema in self.schema_excludes:
                logger().debug(
                logger.debug(
                    "Skipping %s in schema %s marked for exclusion",
                    model_name,
                    model_schema,
                )
                continue

            if not self.model_selected(model_name):
                logger().debug(
                logger.debug(
                    "Skipping %s not included in includes or excluded by excludes",
                    model_name,
                )
@@ -202,17 +203,17 @@ def read_models(
            source_database = node["database"].upper()

            if node["resource_type"] != "source":
                logger().debug("Skipping %s not of resource type source", source_name)
                logger.debug("Skipping %s not of resource type source", source_name)
                continue

            if source_database != self.database:
                logger().debug(
                logger.debug(
                    "Skipping %s not in target database %s", source_name, self.database
                )
                continue

            if self.schema and source_schema != self.schema:
                logger().debug(
                logger.debug(
                    "Skipping %s in schema %s not in target schema %s",
                    source_name,
                    source_schema,
@@ -221,15 +222,15 @@
                continue

            if source_schema in self.schema_excludes:
                logger().debug(
                logger.debug(
                    "Skipping %s in schema %s marked for exclusion",
                    source_name,
                    source_schema,
                )
                continue

            if not self.model_selected(source_name):
                logger().debug(
                logger.debug(
                    "Skipping %s not included in includes or excluded by excludes",
                    source_name,
                )
@@ -330,7 +331,7 @@ def _read_model_relationships(
            # Note, sometimes only the referenced model is returned.
            depends_on_nodes = list(child["depends_on"][model_type])
            if len(depends_on_nodes) > 2:
                logger().warning(
                logger.warning(
                    "Expected at most two nodes, got %d {} nodes, skipping %s {}",
                    len(depends_on_nodes),
                    unique_id,
@@ -341,7 +342,7 @@
            # Otherwise, the primary key of the current model would be (incorrectly) determined to be a foreign key.
            is_incoming_relationship_test = depends_on_nodes[1] != unique_id
            if len(depends_on_nodes) == 2 and is_incoming_relationship_test:
                logger().debug(
                logger.debug(
                    "Skip this incoming relationship test, concerning nodes %s.",
                    depends_on_nodes,
                )
@@ -353,7 +354,7 @@
            depends_on_nodes.remove(unique_id)

            if len(depends_on_nodes) != 1:
                logger().warning(
                logger.warning(
                    "Expected single node after filtering, got %d nodes, skipping %s",
                    len(depends_on_nodes),
                    unique_id,
@@ -369,7 +370,7 @@
            )

            if not fk_target_table_alias:
                logger().debug(
                logger.debug(
                    "Could not resolve depends on model id %s to a model in manifest",
                    depends_on_id,
                )
@@ -447,7 +448,7 @@ def set_column_foreign_key(

        if not table or not field:
            if table or field:
                logger().warning(
                logger.warning(
                    "Foreign key requires table and field for column %s",
                    metabase_column.name,
                )
@@ -462,7 +463,7 @@
            [x.strip('"').upper() for x in table_path]
        )
        metabase_column.fk_target_field = field.strip('"').upper()
        logger().debug(
        logger.debug(
            "Relation from %s to %s.%s",
            metabase_column.name,
            metabase_column.fk_target_table,
@@ -503,6 +504,6 @@ def parse_ref(text: str) -> Optional[str]:
        # We are catching the rightmost argument of either source or ref which is ultimately the table name
        matches = re.findall(r"['\"]([\w\_\-\ ]+)['\"][ ]*\)$", text.strip())
        if matches:
            logger().debug("%s -> %s", text, matches[0])
            logger.debug("%s -> %s", text, matches[0])
            return matches[0]
        return None
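
The regular expression in parse_ref is untouched here; only the logger call changes. For reference, it captures the right-most quoted argument before the closing parenthesis, which is the table name in both ref() and source() calls. A small illustration with made-up inputs:

import re

# Same pattern as parse_ref(): grab the last quoted argument before ")".
pattern = r"['\"]([\w\_\-\ ]+)['\"][ ]*\)$"

print(re.findall(pattern, "ref('orders')"))                   # ['orders']
print(re.findall(pattern, "source('salesforce', 'orders')"))  # ['orders']
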
Empty file removed dbtmetabase/logger/__init__.py
Empty file.
75 changes: 0 additions & 75 deletions dbtmetabase/logger/logging.py

This file was deleted.
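
This removes the custom logger() accessor and its LOGGING_LEVEL switch; every module now follows the standard library pattern already visible in the dbt.py diff above, with handler and level configuration centralised in the CLI's _setup_logger. The pattern, in outline:

import logging

# Module-level logger named after the module (e.g. "dbtmetabase.dbt");
# handlers and levels are attached once by the CLI, not here.
logger = logging.getLogger(__name__)


def do_work() -> None:
    logger.debug("Detail shown when the CLI runs with --verbose")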
