29 commits
eef6916
Pin flask version for flask restx tests. (#931)
umaannamalai Oct 9, 2023
d577a69
Ignore new redis methods. (#932)
umaannamalai Oct 9, 2023
13e9891
Update CI Image (#930)
TimPansino Oct 9, 2023
43160af
Only get package version once (#928)
hmstepanek Oct 9, 2023
cc3e285
Cache Package Version Lookups (#946)
TimPansino Oct 19, 2023
5996de6
Fix Redis Generator Methods (#947)
TimPansino Oct 19, 2023
4721025
Automatic RPM System Updates (#948)
TimPansino Oct 23, 2023
c4ea3cb
Bedrock Testing Infrastructure (#937)
TimPansino Oct 24, 2023
2191684
Drop python 3.7 tests for Hypercorn (#954)
lrafeei Oct 30, 2023
b1ccfc1
Bedrock Sync Chat Completion Instrumentation (#953)
TimPansino Nov 2, 2023
b2512eb
Fix pyenv installation for devcontainer (#936)
TimPansino Nov 2, 2023
b12f7be
Remove duplicate kafka import hook (#956)
lrafeei Nov 2, 2023
989e38c
Feature bedrock cohere instrumentation (#955)
lrafeei Nov 3, 2023
d478b0d
AWS Bedrock Embedding Instrumentation (#957)
TimPansino Nov 3, 2023
1803b64
Add support for bedrock claude (#960)
hmstepanek Nov 6, 2023
277d0a5
Combine Botocore Tests (#959)
TimPansino Nov 6, 2023
b2e9e74
Handle 0.32.0.post1 version in tests (#963)
hmstepanek Nov 6, 2023
c602c2e
Merge branch 'develop-bedrock-instrumentation' into ai-preview
lrafeei Nov 6, 2023
336fa5b
Handle 0.32.0.post1 version in tests (#963)
hmstepanek Nov 6, 2023
ca6006e
Merge branch 'develop-openai-instrumentation' into ai-preview
lrafeei Nov 8, 2023
b7cd20a
Initial merge commit
lrafeei Nov 9, 2023
c5845af
Update moto
TimPansino Nov 8, 2023
dcbdabe
Test for Bedrock embeddings metrics
lrafeei Nov 9, 2023
0882ba4
Bedrock Error Tracing (#966)
TimPansino Nov 9, 2023
47cdfad
Merge branch 'develop-bedrock-instrumentation' into ai-preview
lrafeei Nov 9, 2023
b17e7a3
Fix expected chat completion tests
lrafeei Nov 9, 2023
e2985d2
Merge branch 'develop-openai-instrumentation' into ai-preview
lrafeei Nov 10, 2023
ec5c27b
Merge branch 'main' into ai-preview
lrafeei Nov 10, 2023
2f7253b
Remove commented out code
lrafeei Nov 10, 2023
8 changes: 8 additions & 0 deletions .devcontainer/Dockerfile
@@ -11,4 +11,12 @@ RUN mkdir -p ${HOME} && \
groupadd --gid ${GID} vscode && \
useradd --uid ${UID} --gid ${GID} --home ${HOME} vscode && \
chown -R ${UID}:${GID} /home/vscode

# Move pyenv installation
ENV PYENV_ROOT="${HOME}/.pyenv"
ENV PATH="$PYENV_ROOT/bin:$PYENV_ROOT/shims:${PATH}"
RUN mv /root/.pyenv /home/vscode/.pyenv && \
chown -R vscode:vscode /home/vscode/.pyenv

# Set user
USER ${UID}:${GID}
109 changes: 109 additions & 0 deletions .github/actions/update-rpm-config/action.yml
@@ -0,0 +1,109 @@
name: "update-rpm-config"
description: "Set current version of agent in rpm config using API."
inputs:
agent-language:
description: "Language agent to configure (eg. python)"
required: true
default: "python"
target-system:
description: "Target System: prod|staging|all"
required: true
default: "all"
agent-version:
description: "3-4 digit agent version number (eg. 1.2.3) with optional leading v (ignored)"
required: true
dry-run:
description: "Dry Run"
required: true
default: "false"
production-api-key:
description: "API key for New Relic Production"
required: false
staging-api-key:
description: "API key for New Relic Staging"
required: false

runs:
using: "composite"
steps:
- name: Trim potential leading v from agent version
shell: bash
run: |
AGENT_VERSION=${{ inputs.agent-version }}
echo "AGENT_VERSION=${AGENT_VERSION#"v"}" >> $GITHUB_ENV

- name: Generate Payload
shell: bash
run: |
echo "PAYLOAD='{ \"system_configuration\": { \"key\": \"${{ inputs.agent-language }}_agent_version\", \"value\": \"${{ env.AGENT_VERSION }}\" } }'" >> $GITHUB_ENV

- name: Generate Content-Type
shell: bash
run: |
echo "CONTENT_TYPE='Content-Type: application/json'" >> $GITHUB_ENV

- name: Update Staging system configuration page
shell: bash
if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'staging' || inputs.target-system == 'all') }}
run: |
curl -X POST 'https://staging-api.newrelic.com/v2/system_configuration.json' \
-H "X-Api-Key:${{ inputs.staging-api-key }}" -i \
-H ${{ env.CONTENT_TYPE }} \
-d ${{ env.PAYLOAD }}

- name: Update Production system configuration page
shell: bash
if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'prod' || inputs.target-system == 'all') }}
run: |
curl -X POST 'https://api.newrelic.com/v2/system_configuration.json' \
-H "X-Api-Key:${{ inputs.production-api-key }}" -i \
-H ${{ env.CONTENT_TYPE }} \
-d ${{ env.PAYLOAD }}

- name: Verify Staging system configuration update
shell: bash
if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'staging' || inputs.target-system == 'all') }}
run: |
STAGING_VERSION=$(curl -X GET 'https://staging-api.newrelic.com/v2/system_configuration.json' \
-H "X-Api-Key:${{ inputs.staging-api-key }}" \
-H "${{ env.CONTENT_TYPE }}" | jq ".system_configurations | from_entries | .${{inputs.agent-language}}_agent_version")

if [ "${{ env.AGENT_VERSION }}" != "$STAGING_VERSION" ]; then
echo "Staging version mismatch: $STAGING_VERSION"
exit 1
fi

- name: Verify Production system configuration update
shell: bash
if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'prod' || inputs.target-system == 'all') }}
run: |
PROD_VERSION=$(curl -X GET 'https://api.newrelic.com/v2/system_configuration.json' \
-H "X-Api-Key:${{ inputs.production-api-key }}" \
-H "${{ env.CONTENT_TYPE }}" | jq ".system_configurations | from_entries | .${{inputs.agent-language}}_agent_version")

if [ "${{ env.AGENT_VERSION }}" != "$PROD_VERSION" ]; then
echo "Production version mismatch: $PROD_VERSION"
exit 1
fi

- name: (dry-run) Update Staging system configuration page
shell: bash
if: ${{ inputs.dry-run != 'false' && (inputs.target-system == 'staging' || inputs.target-system == 'all') }}
run: |
cat << EOF
curl -X POST 'https://staging-api.newrelic.com/v2/system_configuration.json' \
-H "X-Api-Key:**REDACTED**" -i \
-H ${{ env.CONTENT_TYPE }} \
-d ${{ env.PAYLOAD }}
EOF

- name: (dry-run) Update Production system configuration page
shell: bash
if: ${{ inputs.dry-run != 'false' && (inputs.target-system == 'prod' || inputs.target-system == 'all') }}
run: |
cat << EOF
curl -X POST 'https://api.newrelic.com/v2/system_configuration.json' \
-H "X-Api-Key:**REDACTED**" -i \
-H ${{ env.CONTENT_TYPE }} \
-d ${{ env.PAYLOAD }}
EOF
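
For reference, the update-and-verify flow above reduces to one POST and one GET against the system configuration endpoint. A minimal Python sketch under stated assumptions: it uses the `requests` package, and the response shape is inferred from the `from_entries` jq filter in the verification steps; neither is part of this PR.

```python
# Illustrative sketch of the composite action's trim / POST / verify round trip.
# Assumes `requests`; endpoint and payload come from the steps above, and the
# response shape is inferred from the `from_entries` jq filter.
import requests

STAGING_URL = "https://staging-api.newrelic.com/v2/system_configuration.json"


def update_and_verify(api_key, language, version, url=STAGING_URL):
    # Mirror the "Trim potential leading v" step (${AGENT_VERSION#"v"}).
    if version.startswith("v"):
        version = version[1:]

    headers = {"X-Api-Key": api_key, "Content-Type": "application/json"}
    payload = {"system_configuration": {"key": "%s_agent_version" % language, "value": version}}

    # "Update ... system configuration page" step.
    requests.post(url, json=payload, headers=headers, timeout=30).raise_for_status()

    # "Verify ... system configuration update" step: read the value back and
    # fail loudly on a mismatch, as the shell script does with `exit 1`.
    entries = requests.get(url, headers=headers, timeout=30).json()["system_configurations"]
    stored = {e["key"]: e["value"] for e in entries}.get("%s_agent_version" % language)
    if stored != version:
        raise RuntimeError("Version mismatch: expected %s, got %s" % (version, stored))
```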
10 changes: 10 additions & 0 deletions .github/workflows/deploy-python.yml
@@ -80,3 +80,13 @@ jobs:
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}

- name: Update RPM Config
uses: ./.github/actions/update-rpm-config
with:
agent-language: "python"
target-system: "all"
agent-version: "${{ github.ref_name }}"
dry-run: "false"
production-api-key: ${{ secrets.NEW_RELIC_API_KEY_PRODUCTION }}
staging-api-key: ${{ secrets.NEW_RELIC_API_KEY_STAGING }}
1 change: 0 additions & 1 deletion .github/workflows/tests.yml
@@ -62,7 +62,6 @@ jobs:

steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v4
with:
python-version: "3.10"
41 changes: 40 additions & 1 deletion newrelic/common/package_version_utils.py
@@ -14,6 +14,44 @@

import sys

try:
from functools import cache as _cache_package_versions
except ImportError:
from functools import wraps
from threading import Lock

_package_version_cache = {}
_package_version_cache_lock = Lock()

def _cache_package_versions(wrapped):
"""
Threadsafe implementation of caching for _get_package_version.

Python 2.7 does not have the @functools.cache decorator, and
must be reimplemented with support for clearing the cache.
"""

@wraps(wrapped)
def _wrapper(name):
if name in _package_version_cache:
return _package_version_cache[name]

with _package_version_cache_lock:
if name in _package_version_cache:
return _package_version_cache[name]

version = _package_version_cache[name] = wrapped(name)
return version

def cache_clear():
"""Cache clear function to mimic @functools.cache"""
with _package_version_cache_lock:
_package_version_cache.clear()

_wrapper.cache_clear = cache_clear
return _wrapper


# Need to account for 4 possible variations of version declaration specified in (rejected) PEP 396
VERSION_ATTRS = ("__version__", "version", "__version_tuple__", "version_tuple") # nosec
NULL_VERSIONS = frozenset((None, "", "0", "0.0", "0.0.0", "0.0.0.0", (0,), (0, 0), (0, 0, 0), (0, 0, 0, 0))) # nosec
@@ -67,6 +105,7 @@ def int_or_str(value):
return version


@_cache_package_versions
def _get_package_version(name):
module = sys.modules.get(name, None)
version = None
@@ -75,7 +114,7 @@
if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"):
try:
# In Python3.10+ packages_distribution can be checked for as well
if hasattr(sys.modules["importlib"].metadata, "packages_distributions"): # pylint: disable=E1101
if hasattr(sys.modules["importlib"].metadata, "packages_distributions"): # pylint: disable=E1101
distributions = sys.modules["importlib"].metadata.packages_distributions() # pylint: disable=E1101
distribution_name = distributions.get(name, name)
else:
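
The fallback decorator gives Python 2.7 (and any interpreter without `functools.cache`) the same memoisation contract: a module-level dictionary guarded by a lock, double-checked so the common cache-hit path stays lock-free, plus a `cache_clear()` hook. A minimal sketch of the intended behaviour — `slow_lookup` and the call counter are illustrative, not part of the agent:

```python
# Demonstrates the memoisation contract of _cache_package_versions; on
# Python 3.9+ it is simply functools.cache, otherwise the locked-dict fallback.
from newrelic.common.package_version_utils import _cache_package_versions

calls = {"count": 0}


@_cache_package_versions
def slow_lookup(name):
    calls["count"] += 1            # stand-in for an expensive version lookup
    return "%s-1.0.0" % name


assert slow_lookup("redis") == "redis-1.0.0"
assert slow_lookup("redis") == "redis-1.0.0"   # second call served from the cache
assert calls["count"] == 1

slow_lookup.cache_clear()                      # mimics functools.cache's cache_clear()
assert slow_lookup("redis") == "redis-1.0.0"
assert calls["count"] == 2                     # recomputed after clearing
```

Either import path satisfies the same contract, so callers only depend on memoisation plus `cache_clear()`.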
5 changes: 0 additions & 5 deletions newrelic/config.py
@@ -2463,11 +2463,6 @@ def _process_module_builtin_defaults():
"newrelic.hooks.messagebroker_kafkapython",
"instrument_kafka_heartbeat",
)
_process_module_definition(
"kafka.consumer.group",
"newrelic.hooks.messagebroker_kafkapython",
"instrument_kafka_consumer_group",
)

_process_module_definition(
"logging",
74 changes: 35 additions & 39 deletions newrelic/hooks/datastore_redis.py
@@ -14,10 +14,11 @@

import re

from newrelic.api.datastore_trace import DatastoreTrace
from newrelic.api.datastore_trace import DatastoreTrace, DatastoreTraceWrapper, wrap_datastore_trace
from newrelic.api.time_trace import current_trace
from newrelic.api.transaction import current_transaction
from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper
from newrelic.common.object_wrapper import wrap_function_wrapper
from newrelic.common.async_wrapper import coroutine_wrapper, async_generator_wrapper, generator_wrapper

_redis_client_sync_methods = {
"acl_dryrun",
@@ -136,6 +137,7 @@
"client_no_evict",
"client_pause",
"client_reply",
"client_setinfo",
"client_setname",
"client_tracking",
"client_trackinginfo",
@@ -162,7 +164,6 @@
"cluster_reset",
"cluster_save_config",
"cluster_set_config_epoch",
"client_setinfo",
"cluster_setslot",
"cluster_slaves",
"cluster_slots",
@@ -248,7 +249,7 @@
"hmset_dict",
"hmset",
"hrandfield",
"hscan_inter",
"hscan_iter",
"hscan",
"hset",
"hsetnx",
@@ -399,8 +400,8 @@
"syndump",
"synupdate",
"tagvals",
"tfcall",
"tfcall_async",
"tfcall",
"tfunction_delete",
"tfunction_list",
"tfunction_load",
@@ -473,6 +474,13 @@
"zunionstore",
}

_redis_client_gen_methods = {
"scan_iter",
"hscan_iter",
"sscan_iter",
"zscan_iter",
}

_redis_client_methods = _redis_client_sync_methods.union(_redis_client_async_methods)

_redis_multipart_commands = set(["client", "cluster", "command", "config", "debug", "sentinel", "slowlog", "script"])
@@ -498,50 +506,31 @@ def _instance_info(kwargs):


def _wrap_Redis_method_wrapper_(module, instance_class_name, operation):
def _nr_wrapper_Redis_method_(wrapped, instance, args, kwargs):
transaction = current_transaction()

if transaction is None:
return wrapped(*args, **kwargs)

dt = DatastoreTrace(product="Redis", target=None, operation=operation, source=wrapped)

transaction._nr_datastore_instance_info = (None, None, None)

with dt:
result = wrapped(*args, **kwargs)

host, port_path_or_id, db = transaction._nr_datastore_instance_info
dt.host = host
dt.port_path_or_id = port_path_or_id
dt.database_name = db

return result

name = "%s.%s" % (instance_class_name, operation)
wrap_function_wrapper(module, name, _nr_wrapper_Redis_method_)
if operation in _redis_client_gen_methods:
async_wrapper = generator_wrapper
else:
async_wrapper = None

wrap_datastore_trace(module, name, product="Redis", target=None, operation=operation, async_wrapper=async_wrapper)

def _wrap_asyncio_Redis_method_wrapper(module, instance_class_name, operation):
@function_wrapper
async def _nr_wrapper_asyncio_Redis_async_method_(wrapped, instance, args, kwargs):
transaction = current_transaction()
if transaction is None:
return await wrapped(*args, **kwargs)

with DatastoreTrace(product="Redis", target=None, operation=operation):
return await wrapped(*args, **kwargs)

def _wrap_asyncio_Redis_method_wrapper(module, instance_class_name, operation):
def _nr_wrapper_asyncio_Redis_method_(wrapped, instance, args, kwargs):
from redis.asyncio.client import Pipeline

if isinstance(instance, Pipeline):
return wrapped(*args, **kwargs)

# Method should be run when awaited, therefore we wrap in an async wrapper.
return _nr_wrapper_asyncio_Redis_async_method_(wrapped)(*args, **kwargs)
# Method should be run when awaited or iterated, therefore we wrap in an async wrapper.
return DatastoreTraceWrapper(wrapped, product="Redis", target=None, operation=operation, async_wrapper=async_wrapper)(*args, **kwargs)

name = "%s.%s" % (instance_class_name, operation)
if operation in _redis_client_gen_methods:
async_wrapper = async_generator_wrapper
else:
async_wrapper = coroutine_wrapper

wrap_function_wrapper(module, name, _nr_wrapper_asyncio_Redis_method_)


@@ -614,7 +603,15 @@ def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs):
except:
pass

transaction._nr_datastore_instance_info = (host, port_path_or_id, db)
# Find DatastoreTrace no matter how many other traces are inbetween
trace = current_trace()
while trace is not None and not isinstance(trace, DatastoreTrace):
trace = getattr(trace, "parent", None)

if trace is not None:
trace.host = host
trace.port_path_or_id = port_path_or_id
trace.database_name = db

# Older Redis clients would when sending multi part commands pass
# them in as separate arguments to send_command(). Need to therefore
Expand Down Expand Up @@ -666,7 +663,6 @@ def instrument_asyncio_redis_client(module):
if hasattr(class_, operation):
_wrap_asyncio_Redis_method_wrapper(module, "Redis", operation)


def instrument_redis_commands_core(module):
_instrument_redis_commands_module(module, "CoreCommands")

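
Two pieces of this rework fit together: generator-producing commands (`scan_iter`, `hscan_iter`, `sscan_iter`, `zscan_iter`) are routed through a generator-aware wrapper so the datastore trace spans the whole iteration, and `send_command` now attaches host/port/database by walking up from the current trace to the nearest enclosing `DatastoreTrace`, rather than stashing the values on the transaction. A minimal sketch of that parent walk, with `attach_instance_info` as an illustrative name:

```python
# Sketch of the parent-walk pattern used in _nr_Connection_send_command_wrapper_
# above; attach_instance_info and its call site are illustrative only.
from newrelic.api.datastore_trace import DatastoreTrace
from newrelic.api.time_trace import current_trace


def attach_instance_info(host, port_path_or_id, db):
    # Start at the innermost active trace and climb parents until the
    # enclosing DatastoreTrace is found (or the root is reached).
    trace = current_trace()
    while trace is not None and not isinstance(trace, DatastoreTrace):
        trace = getattr(trace, "parent", None)

    if trace is not None:
        trace.host = host
        trace.port_path_or_id = port_path_or_id
        trace.database_name = db
```

Because the instance info now lands on whichever `DatastoreTrace` is active, the same connection-level code path presumably works regardless of whether the command was issued from a plain call, a coroutine, or one of the `*_iter` generators.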