diff --git a/src/autometrics/decorator.py b/src/autometrics/decorator.py
index 9e8cfeb..6d3d253 100644
--- a/src/autometrics/decorator.py
+++ b/src/autometrics/decorator.py
@@ -1,13 +1,18 @@
 """Autometrics module."""
 import time
 import inspect
-
 from functools import wraps
 from typing import overload, TypeVar, Callable, Optional, Awaitable
 from typing_extensions import ParamSpec
+
 from .objectives import Objective
 from .tracker import get_tracker, Result
-from .utils import get_module_name, get_caller_function, append_docs_to_docstring
+from .utils import (
+    get_function_name,
+    get_module_name,
+    get_caller_function,
+    append_docs_to_docstring,
+)
 
 P = ParamSpec("P")
@@ -85,13 +90,13 @@ def sync_decorator(func: Callable[P, T]) -> Callable[P, T]:
         """Helper for decorating synchronous functions, to track calls and duration."""
         module_name = get_module_name(func)
-        func_name = func.__name__
+        func_name = get_function_name(func)
         register_function_info(func_name, module_name)
 
         @wraps(func)
         def sync_wrapper(*args: P.args, **kwds: P.kwargs) -> T:
             start_time = time.time()
-            caller = get_caller_function()
+            caller = get_caller_function(func_name=func_name, module_name=module_name)
             try:
                 if track_concurrency:
@@ -120,13 +125,13 @@ def async_decorator(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]
         """Helper for decorating async functions, to track calls and duration."""
         module_name = get_module_name(func)
-        func_name = func.__name__
+        func_name = get_function_name(func)
         register_function_info(func_name, module_name)
 
         @wraps(func)
         async def async_wrapper(*args: P.args, **kwds: P.kwargs) -> T:
             start_time = time.time()
-            caller = get_caller_function()
+            caller = get_caller_function(func_name=func_name, module_name=module_name)
             try:
                 if track_concurrency:
diff --git a/src/autometrics/test_caller.py b/src/autometrics/test_caller.py
new file mode 100644
index 0000000..2e90f86
--- /dev/null
+++ b/src/autometrics/test_caller.py
@@ -0,0 +1,63 @@
+"""Tests for caller tracking."""
+from functools import wraps
+from prometheus_client.exposition import generate_latest
+
+from .decorator import autometrics
+from .utils import get_caller_function, get_function_name, get_module_name
+
+
+def test_caller_detection():
+    """This is a test to see if the caller is properly detected."""
+
+    def dummy_decorator(func):
+        @wraps(func)
+        def dummy_wrapper(*args, **kwargs):
+            return func(*args, **kwargs)
+
+        return dummy_wrapper
+
+    def another_decorator(func):
+        @wraps(func)
+        def another_wrapper(*args, **kwargs):
+            return func(*args, **kwargs)
+
+        return another_wrapper
+
+    @dummy_decorator
+    @autometrics
+    @another_decorator
+    def foo():
+        print(get_caller_function(func_name="foo", module_name=__name__))
+        pass
+
+    foo()
+
+    func_name = get_function_name(foo)
+
+    blob = generate_latest()
+    assert blob is not None
+    data = blob.decode("utf-8")
+
+    expected = """function_calls_count_total{caller="test_caller_detection",function="test_caller_detection.<locals>.foo",module="autometrics.test_caller",objective_name="",objective_percentile="",result="ok"} 1.0"""
+    assert "wrapper" not in data
+    assert expected in data
+
+
+def test_caller_detection_external():
+    """Same as above, but for external wrapper."""
+
+    from .test_caller_external import bar
+
+    bar()
+
+    caller = get_caller_function(depth=1)
+    func_name = get_function_name(bar)
+    module_name = get_module_name(bar)
+
+    blob = generate_latest()
+    assert blob is not None
+    data = blob.decode("utf-8")
+
f"""function_calls_count_total{{caller="{caller}",function="{func_name}",module="{module_name}",objective_name="",objective_percentile="",result="ok"}} 1.0""" + assert "external_wrapper" not in data + assert expected in data diff --git a/src/autometrics/test_caller_external.py b/src/autometrics/test_caller_external.py new file mode 100644 index 0000000..ca08b7f --- /dev/null +++ b/src/autometrics/test_caller_external.py @@ -0,0 +1,27 @@ +"""Supplementary for caller tests.""" +from functools import wraps + +from .decorator import autometrics + + +def dummy_external_decorator(func): + @wraps(func) + def dummy_external_wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return dummy_external_wrapper + + +def another_external_decorator(func): + @wraps(func) + def another_external_wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return another_external_wrapper + + +@dummy_external_decorator +@autometrics +@another_external_decorator +def bar(): + pass diff --git a/src/autometrics/test_decorator.py b/src/autometrics/test_decorator.py index 35eb4b1..9301a4a 100644 --- a/src/autometrics/test_decorator.py +++ b/src/autometrics/test_decorator.py @@ -6,9 +6,8 @@ from .decorator import autometrics from .objectives import ObjectiveLatency, Objective, ObjectivePercentile - from .tracker import set_tracker, TrackerType -from .utils import get_caller_function +from .utils import get_caller_function, get_function_name, get_module_name def basic_function(sleep_duration: float = 0.0): @@ -62,9 +61,9 @@ def test_basic(self): # set up the function + basic variables caller = get_caller_function(depth=1) - assert caller is not None assert caller != "" - function_name = basic_function.__name__ + function_name = get_function_name(basic_function) + module_name = get_module_name(basic_function) wrapped_function = autometrics(basic_function) wrapped_function() @@ -72,17 +71,17 @@ def test_basic(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="ok"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = 
f"""function_calls_duration_sum{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data @pytest.mark.asyncio @@ -91,9 +90,9 @@ async def test_basic_async(self): # set up the function + basic variables caller = get_caller_function(depth=1) - assert caller is not None assert caller != "" - function_name = basic_async_function.__name__ + function_name = get_function_name(basic_async_function) + module_name = get_module_name(basic_async_function) wrapped_function = autometrics(basic_async_function) # Test that the function is *still* async after we wrap it @@ -105,17 +104,17 @@ async def test_basic_async(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="ok"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data def test_objectives(self): @@ -123,7 +122,6 @@ def test_objectives(self): # set up the function + objective variables caller = get_caller_function(depth=1) - assert caller is not None assert caller != "" objective_name = "test_objective" success_rate = ObjectivePercentile.P90 @@ -131,7 +129,8 @@ def test_objectives(self): objective = Objective( name=objective_name, success_rate=success_rate, latency=latency ) - function_name = basic_function.__name__ + function_name = get_function_name(basic_function) + module_name = get_module_name(basic_function) wrapped_function = autometrics(objective=objective)(basic_function) sleep_duration = 0.25 @@ -143,19 +142,19 @@ def test_objectives(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="{module_name}",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 
1.0""" assert total_count in data # Check the latency buckets for objective in ObjectiveLatency: count = 0 if float(objective.value) <= sleep_duration else 1 - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" + query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="{module_name}",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="{module_name}",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="{module_name}",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_sum in data @pytest.mark.asyncio @@ -164,7 +163,6 @@ async def test_objectives_async(self): # set up the function + objective variables caller = get_caller_function(depth=1) - assert caller is not None assert caller != "" objective_name = "test_objective" success_rate = ObjectivePercentile.P90 @@ -172,7 +170,8 @@ async def test_objectives_async(self): objective = Objective( name=objective_name, success_rate=success_rate, latency=latency ) - function_name = basic_async_function.__name__ + function_name = get_function_name(basic_async_function) + module_name = get_module_name(basic_async_function) wrapped_function = autometrics(objective=objective)(basic_async_function) sleep_duration = 0.25 @@ -187,28 +186,28 @@ async def test_objectives_async(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="{module_name}",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" assert total_count in data # Check the latency buckets for objective in ObjectiveLatency: count = 0 if float(objective.value) <= sleep_duration else 1 - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" + query = 
f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="{module_name}",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="{module_name}",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="{module_name}",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_sum in data def test_exception(self): """This is a test that covers exceptions.""" caller = get_caller_function(depth=1) - assert caller is not None assert caller != "" - function_name = error_function.__name__ + function_name = get_function_name(error_function) + module_name = get_module_name(error_function) wrapped_function = autometrics(error_function) with pytest.raises(RuntimeError) as exception: @@ -220,27 +219,27 @@ def test_exception(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="error"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data @pytest.mark.asyncio async def test_async_exception(self): """This is a test that covers exceptions.""" caller = get_caller_function(depth=1) - assert caller is not None assert caller != 
"" - function_name = error_async_function.__name__ + function_name = get_function_name(error_async_function) + module_name = get_module_name(error_async_function) wrapped_function = autometrics(error_async_function) # Test that the function is *still* async after we wrap it @@ -255,23 +254,24 @@ async def test_async_exception(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="error"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="{module_name}",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data def test_initialize_counters_sync(self): """This is a test to see if the function calls metric initializes at 0 after invoking the decorator.""" - function_name = never_called_function.__name__ + function_name = get_function_name(never_called_function) + module_name = get_module_name(never_called_function) wrapped_function = autometrics(never_called_function) # NOTE - Do not call the function! 
We want to see if we get counter data for it @@ -279,10 +279,10 @@ def test_initialize_counters_sync(self): assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="error"}} 0.0""" assert total_count_error in data def test_initialize_counters_sync_with_objective(self): @@ -292,7 +292,8 @@ def test_initialize_counters_sync_with_objective(self): success_rate = ObjectivePercentile.P90 objective = Objective(name=objective_name, success_rate=success_rate) - function_name = never_called_function.__name__ + function_name = get_function_name(never_called_function) + module_name = get_module_name(never_called_function) wrapped_function = autometrics(objective=objective)(never_called_function) # NOTE - Do not call the function! We want to see if we get counter data for it @@ -300,17 +301,18 @@ def test_initialize_counters_sync_with_objective(self): assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" assert total_count_error in data @pytest.mark.asyncio async def test_initialize_counters_async(self): """This is a test to see if the function calls metric initializes at 0 after invoking the decorator for an async function""" - function_name = never_called_async_function.__name__ + function_name = get_function_name(never_called_async_function) + module_name = get_module_name(never_called_async_function) wrapped_function = autometrics(never_called_async_function) # NOTE - Do not call the function! 
We want to see if we get counter data for it even without ever calling it @@ -318,10 +320,10 @@ async def test_initialize_counters_async(self): assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="",objective_percentile="",result="error"}} 0.0""" assert total_count_error in data @pytest.mark.asyncio @@ -332,7 +334,8 @@ async def test_initialize_counters_async_with_objective(self): success_rate = ObjectivePercentile.P90 objective = Objective(name=objective_name, success_rate=success_rate) - function_name = never_called_async_function.__name__ + function_name = get_function_name(never_called_async_function) + module_name = get_module_name(never_called_async_function) wrapped_function = autometrics(objective=objective)(never_called_async_function) # NOTE - Do not call the function! We want to see if we get counter data for it even without ever calling it @@ -340,8 +343,8 @@ async def test_initialize_counters_async_with_objective(self): assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="{module_name}",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" assert total_count_error in data diff --git a/src/autometrics/tracker/test_concurrency.py b/src/autometrics/tracker/test_concurrency.py index 652d217..1154cf4 100644 --- a/src/autometrics/tracker/test_concurrency.py +++ b/src/autometrics/tracker/test_concurrency.py @@ -6,6 +6,7 @@ from .tracker import set_tracker, TrackerType from ..decorator import autometrics +from ..utils import get_function_name, get_module_name @autometrics(track_concurrency=True) @@ -19,6 +20,9 @@ async def test_concurrency_tracking_prometheus(monkeypatch): # because the library was already initialized with the OpenTelemetry tracker set_tracker(TrackerType.PROMETHEUS) + func_name = get_function_name(sleep) + module_name = get_module_name(sleep) + # Create a 200ms async task loop = asyncio.get_event_loop() task = loop.create_task(sleep(0.2)) @@ -33,7 +37,7 @@ async def test_concurrency_tracking_prometheus(monkeypatch): data = blob.decode("utf-8") print(data) assert ( - f"""# TYPE 
-        f"""# TYPE function_calls_concurrent gauge\nfunction_calls_concurrent{{function="sleep",module="test_concurrency"}} 1.0"""
+        f"""# TYPE function_calls_concurrent gauge\nfunction_calls_concurrent{{function="{func_name}",module="{module_name}"}} 1.0"""
         in data
     )
diff --git a/src/autometrics/utils.py b/src/autometrics/utils.py
index ef65ab7..51b7a4e 100644
--- a/src/autometrics/utils.py
+++ b/src/autometrics/utils.py
@@ -1,19 +1,20 @@
 import inspect
+import gc
 import os
+from types import FunctionType
+from typing import Union
+import pytest
 from collections.abc import Callable
+
 from .prometheus_url import Generator
 
 
 def get_module_name(func: Callable) -> str:
     """Get the name of the module that contains the function."""
-    func_name = func.__name__
-    fullname = func.__qualname__
-    filename = get_filename_as_module(func)
-    if fullname == func_name:
-        return filename
-
-    classname = func.__qualname__.rsplit(".", 1)[0]
-    return f"{filename}.{classname}"
+    module = inspect.getmodule(func)
+    if module is None:
+        return get_filename_as_module(func)
+    return module.__name__
 
 
 def get_filename_as_module(func: Callable) -> str:
@@ -27,6 +28,11 @@ def get_filename_as_module(func: Callable) -> str:
     return module_part
 
 
+def get_function_name(func: Callable) -> str:
+    """Get the name of the function."""
+    return func.__qualname__ or func.__name__
+
+
 def write_docs(func_name: str, module_name: str):
     """Write the prometheus query urls to the function docstring."""
     generator = Generator(func_name, module_name)
@@ -48,8 +54,43 @@ def append_docs_to_docstring(func, func_name, module_name):
     return f"{func.__doc__}\n{write_docs(func_name, module_name)}"
 
 
-def get_caller_function(depth: int = 2):
+def get_caller_function(
+    depth: Union[int, None] = None,
+    func_name: Union[str, None] = None,
+    module_name: Union[str, None] = None,
+):
     """Get the name of the function. Default depth is 2 to get the caller of the caller of the function being decorated."""
-    caller_frame = inspect.stack()[depth]
-    caller_function_name = caller_frame[3]
+    caller_function_name = ""
+
+    if not gc.isenabled():
+        return caller_function_name
+
+    try:
+        # This loop is written specifically like this because iterating via for
+        # doesn't work correctly. The for loop will only iterate over the first
+        # element of the stack.
+        stack = inspect.stack()
+        # Skip the first element of the stack, which is the current function.
+        i = 1
+        adjusted_depth = depth + 1 if depth is not None else None
+        while i < len(stack):
+            if adjusted_depth is not None and i == adjusted_depth:
+                caller_function_name = stack[i].function
+                break
+            # Skip the autometrics decorator.
+            if stack[i].frame.f_globals["__name__"] == "autometrics.decorator":
+                i += 1
+                continue
+            referrers = gc.get_referrers(stack[i].frame.f_code)
+            referrers = [ref for ref in referrers if isinstance(ref, FunctionType)]
+            if len(referrers) != 1:
+                break
+            caller = get_function_name(referrers[0])
+            module = get_module_name(referrers[0])
+            if caller != func_name or module != module_name:
+                caller_function_name = caller
+                break
+            i += 1
+    except Exception as e:
+        print(e)
     return caller_function_name