23 changes: 23 additions & 0 deletions .env.otlp.example
@@ -0,0 +1,23 @@

# Exporter endpoints (example values point at a local Zipkin instance)
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="http://localhost:9411/api/v2/spans"
OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="http://localhost:9411/api/v2/spans"
USE_ZIPKIN=true

# Service identification
OTEL_SERVICE_NAME="my-graphrag-app"
OTEL_SERVICE_VERSION="1.0.0"
OTEL_SERVICE_NAMESPACE="my-company"

# Tracing configuration
OTEL_ENABLE_TRACING="true"
OTEL_ENABLE_METRICS="true"
OTEL_TRACE_SAMPLE_RATE="1.0" # Sample 100% of traces

# Environment
OTEL_DEPLOYMENT_ENVIRONMENT="production"

# Disable telemetry completely (if needed)
DISABLE_TELEMETRY="false"
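A minimal sketch of how a setup routine could consume these variables; the real `setup_telemetry` lives in `graphrag/telemetry` and is not part of this diff, so the helper name and wiring below are assumptions:

```python
# Hypothetical, env-driven tracer setup; not the actual graphrag.telemetry code.
import os

from opentelemetry import trace
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor


def setup_tracing_from_env() -> None:
    """Configure a global tracer provider from the variables in .env.otlp.example."""
    if os.getenv("OTEL_ENABLE_TRACING", "true").lower() != "true":
        return

    resource = Resource.create({
        "service.name": os.getenv("OTEL_SERVICE_NAME", "graphrag"),
        "service.version": os.getenv("OTEL_SERVICE_VERSION", "0.0.0"),
        "service.namespace": os.getenv("OTEL_SERVICE_NAMESPACE", ""),
        "deployment.environment": os.getenv("OTEL_DEPLOYMENT_ENVIRONMENT", "development"),
    })
    provider = TracerProvider(resource=resource)

    endpoint = os.getenv(
        "OTEL_EXPORTER_OTLP_TRACES_ENDPOINT", "http://localhost:9411/api/v2/spans"
    )
    if os.getenv("USE_ZIPKIN", "false").lower() == "true":
        # Requires the opentelemetry-exporter-zipkin-json package.
        from opentelemetry.exporter.zipkin.json import ZipkinExporter

        exporter = ZipkinExporter(endpoint=endpoint)
    else:
        # Requires the opentelemetry-exporter-otlp-proto-http package.
        from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter

        exporter = OTLPSpanExporter(endpoint=endpoint)

    provider.add_span_processor(BatchSpanProcessor(exporter))
    trace.set_tracer_provider(provider)
```

`OTEL_TRACE_SAMPLE_RATE` would presumably map onto a sampler such as `TraceIdRatioBased(1.0)` passed to the provider, and `OTEL_ENABLE_METRICS` onto an analogous `MeterProvider` setup; both are omitted here for brevity.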
16 changes: 16 additions & 0 deletions graphrag/__init__.py
@@ -2,3 +2,19 @@
# Licensed under the MIT License

"""The GraphRAG package."""
import logging

from .telemetry import setup_telemetry, is_telemetry_disabled

logger = logging.getLogger(__name__)

# Initialize telemetry automatically when the package is imported
# unless explicitly disabled
if not is_telemetry_disabled():
    try:
        setup_telemetry()
        logger.info("Telemetry initialized automatically")
    except Exception as e:  # never fail package import because of telemetry
        logger.warning(f"Failed to initialize telemetry: {e}")
else:
    logger.info("Telemetry is disabled via the DISABLE_TELEMETRY environment variable")
9 changes: 8 additions & 1 deletion graphrag/api/index.py
@@ -22,10 +22,17 @@
from graphrag.index.typing.pipeline_run_result import PipelineRunResult
from graphrag.index.workflows.factory import PipelineFactory
from graphrag.logger.standard_logging import init_loggers
from graphrag.telemetry.decorators import add_trace

logger = logging.getLogger(__name__)


@add_trace(
    operation_name="build_index",
    attributes={
        "component": "indexing",
        "operation": "build_index",
    }
)
async def build_index(
    config: GraphRagConfig,
    method: IndexingMethod | str = IndexingMethod.Standard,
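`add_trace` comes from the new `graphrag/telemetry/decorators` module, which is not included in this excerpt. A plausible sketch of such a decorator, covering both the async (`build_index`) and sync (`load_config`) usages in this PR; the names and attribute handling are assumptions, not the PR's actual code:

```python
# Hypothetical add_trace decorator built on the OpenTelemetry API.
import functools
import inspect

from opentelemetry import trace


def add_trace(operation_name: str, attributes: dict[str, str] | None = None):
    """Run each call of the decorated function inside an OpenTelemetry span."""

    def decorator(func):
        tracer = trace.get_tracer(func.__module__)

        if inspect.iscoroutinefunction(func):

            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                with tracer.start_as_current_span(operation_name) as span:
                    for key, value in (attributes or {}).items():
                        span.set_attribute(key, value)
                    return await func(*args, **kwargs)

            return async_wrapper

        @functools.wraps(func)
        def sync_wrapper(*args, **kwargs):
            with tracer.start_as_current_span(operation_name) as span:
                for key, value in (attributes or {}).items():
                    span.set_attribute(key, value)
                return func(*args, **kwargs)

        return sync_wrapper

    return decorator
```

If no tracer provider has been configured, the OpenTelemetry API hands back a no-op tracer, so decorated functions keep working unchanged when telemetry is off.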
5 changes: 4 additions & 1 deletion graphrag/config/load_config.py
@@ -14,6 +14,7 @@

from graphrag.config.create_graphrag_config import create_graphrag_config
from graphrag.config.models.graph_rag_config import GraphRagConfig
from graphrag.telemetry.decorators import add_trace

_default_config_files = ["settings.yaml", "settings.yml", "settings.json"]

@@ -142,7 +143,9 @@ def _parse(file_extension: str, contents: str) -> dict[str, Any]:
)
raise ValueError(msg)


@add_trace(
    operation_name="graphrag.config.load_config",
    attributes={"component": "config"},
)
def load_config(
    root_dir: Path,
    config_filepath: Path | None = None,
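For a quick local check of the new span, a sketch that prints it to stdout instead of exporting to Zipkin; it assumes a `settings.yaml` in the working directory and that the decorator records spans whenever a global provider is configured:

```python
# Run with DISABLE_TELEMETRY=true so graphrag's automatic setup does not
# claim the global tracer provider before this console exporter does.
from pathlib import Path

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

from graphrag.config.load_config import load_config

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

config = load_config(Path("."))
# A span named "graphrag.config.load_config" with component="config"
# should be printed once load_config returns.
```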
graphrag/index/operations/build_noun_graph/np_extractors/factory.py
@@ -22,7 +22,7 @@
from graphrag.index.operations.build_noun_graph.np_extractors.syntactic_parsing_extractor import (
    SyntacticNounPhraseExtractor,
)

from graphrag.telemetry.decorators import add_trace

class NounPhraseExtractorFactory:
"""A factory class for creating noun phrase extractor."""
@@ -74,7 +74,10 @@ def get_np_extractor(cls, config: TextAnalyzerConfig) -> BaseNounPhraseExtractor
word_delimiter=config.word_delimiter,
)


@add_trace(
    operation_name="np_extractor.factory.create_noun_phrase_extractor",
    attributes={"component": "np_extractor_factory"},
)
def create_noun_phrase_extractor(
    analyzer_config: TextAnalyzerConfig,
) -> BaseNounPhraseExtractor:
2 changes: 2 additions & 0 deletions graphrag/index/workflows/create_communities.py
@@ -17,11 +17,13 @@
from graphrag.index.operations.create_graph import create_graph
from graphrag.index.typing.context import PipelineRunContext
from graphrag.index.typing.workflow import WorkflowFunctionOutput
from graphrag.telemetry.decorators import trace_workflow
from graphrag.utils.storage import load_table_from_storage, write_table_to_storage

logger = logging.getLogger(__name__)


@trace_workflow("create_communities")
async def run_workflow(
    config: GraphRagConfig,
    context: PipelineRunContext,
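`trace_workflow` (also from `graphrag.telemetry.decorators`, not shown here) appears to take only the workflow name. A minimal sketch of what it might look like; the span-name prefix and attribute are assumptions:

```python
# Hypothetical trace_workflow decorator for async pipeline workflows.
import functools

from opentelemetry import trace


def trace_workflow(workflow_name: str):
    """Trace an async run_workflow(config, context) call as a single span."""

    def decorator(func):
        tracer = trace.get_tracer(func.__module__)

        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            with tracer.start_as_current_span(f"workflow.{workflow_name}") as span:
                span.set_attribute("graphrag.workflow", workflow_name)
                return await func(*args, **kwargs)

        return wrapper

    return decorator
```

Because each workflow is awaited inside the traced `build_index` call, these spans would nest under the `build_index` span whenever both run in the same trace context.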
2 changes: 2 additions & 0 deletions graphrag/index/workflows/load_input_documents.py
@@ -13,11 +13,13 @@
from graphrag.index.typing.context import PipelineRunContext
from graphrag.index.typing.workflow import WorkflowFunctionOutput
from graphrag.storage.pipeline_storage import PipelineStorage
from graphrag.telemetry.decorators import trace_workflow
from graphrag.utils.storage import write_table_to_storage

logger = logging.getLogger(__name__)


@trace_workflow("load_input_documents")
async def run_workflow(
    config: GraphRagConfig,
    context: PipelineRunContext,
2 changes: 2 additions & 0 deletions graphrag/query/structured_search/global_search/search.py
@@ -31,6 +31,7 @@
)
from graphrag.query.llm.text_utils import try_parse_json_object
from graphrag.query.structured_search.base import BaseSearch, SearchResult
from graphrag.telemetry.decorators import trace_search_operation
from graphrag.tokenizer.tokenizer import Tokenizer

logger = logging.getLogger(__name__)
@@ -132,6 +133,7 @@ async def stream_search(
        ):
            yield response

    @trace_search_operation("global_search")
    async def search(
        self,
        query: str,
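`trace_search_operation` wraps the async `search` methods. One way such a decorator could also surface result metadata; recording `llm_calls` and `prompt_tokens` from `SearchResult` is an assumption about this PR, hence the `getattr` guards:

```python
# Hypothetical trace_search_operation decorator for the async search() methods.
import functools

from opentelemetry import trace


def trace_search_operation(search_type: str):
    """Trace a search call and attach basic result metadata to the span."""

    def decorator(func):
        tracer = trace.get_tracer(func.__module__)

        @functools.wraps(func)
        async def wrapper(self, query: str, *args, **kwargs):
            with tracer.start_as_current_span(f"search.{search_type}") as span:
                span.set_attribute("graphrag.search.type", search_type)
                span.set_attribute("graphrag.search.query_length", len(query))
                result = await func(self, query, *args, **kwargs)
                # These SearchResult fields are assumed; guard in case they change.
                span.set_attribute("graphrag.search.llm_calls", getattr(result, "llm_calls", 0))
                span.set_attribute("graphrag.search.prompt_tokens", getattr(result, "prompt_tokens", 0))
                return result

        return wrapper

    return decorator
```

The same decorator serves `local_search` below; only the `search_type` label differs.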
2 changes: 2 additions & 0 deletions graphrag/query/structured_search/local_search/search.py
@@ -18,6 +18,7 @@
ConversationHistory,
)
from graphrag.query.structured_search.base import BaseSearch, SearchResult
from graphrag.telemetry.decorators import trace_search_operation
from graphrag.tokenizer.tokenizer import Tokenizer

logger = logging.getLogger(__name__)
@@ -48,6 +49,7 @@ def __init__(
        self.callbacks = callbacks or []
        self.response_type = response_type

    @trace_search_operation("local_search")
    async def search(
        self,
        query: str,