Merged in feature/new-relic-logs (pull request #198)

Feature/new relic logs
This commit is contained in:
Daniel Egger 2023-08-29 12:05:16 +00:00
commit e96c21f623
16 changed files with 111 additions and 71 deletions


@@ -168,6 +168,10 @@ It seems that right now, you have to make a manual step on Azure to use this new
 Docker container and update it on Azure.
 Please ask Lorenz for more information.
+#### Prod Monitoring on New Relic
+See docs/new-relic.md
 ### CapRover vbv-develop
 Bitbucket Pipelines name: develop


@@ -15,4 +15,4 @@ else
     python /app/manage.py migrate
 fi
-/usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:7555 --chdir=/app -k uvicorn.workers.UvicornWorker
+newrelic-admin run-program gunicorn config.asgi --bind 0.0.0.0:7555 --chdir=/app -k uvicorn.workers.UvicornWorker
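For reference, `newrelic-admin run-program` is the wrapper shipped with the New Relic Python agent; it attaches the agent before gunicorn starts and picks up its settings from the environment. Purely as an illustrative alternative (not part of this change), the agent can also be initialized in code; the ini path and ASGI attribute below are assumptions based on the command above.

```python
# Illustrative sketch only, not part of this change: attaching the New Relic
# agent in code instead of via the newrelic-admin wrapper. The ini path is an
# assumption; license key / app name can also come from the NEW_RELIC_LICENSE_KEY
# and NEW_RELIC_APP_NAME environment variables.
import newrelic.agent

newrelic.agent.initialize("/app/newrelic.ini")

# Import the ASGI app only after the agent is initialized.
from config.asgi import application  # noqa: E402
```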


@@ -3,9 +3,20 @@
 This document should help the user with a few commands, how to handle the azure app.
-# Get logs of the container
+## Get logs of the container
 ```bash
 az webapp log tail --resource-group VBV
 ```
+## Query for logs
+```
+AppServiceConsoleLogs
+| extend logLevel = parse_json(ResultDescription).level
+| where logLevel == "error"
+| take 10
+```
+![azure_logs01.png](azure_logs01.png)
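The query above is Kusto (KQL) against the AppServiceConsoleLogs table of the web app's Log Analytics workspace. For completeness, here is a hedged sketch of running the same query from Python with the azure-monitor-query SDK; that package is not a dependency of this project and the workspace ID is only a placeholder.

```python
# Hypothetical sketch: run the same KQL query outside the Azure portal.
# azure-monitor-query is NOT in this project's requirements; the workspace ID
# is a placeholder.
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient

client = LogsQueryClient(DefaultAzureCredential())

query = """
AppServiceConsoleLogs
| extend logLevel = parse_json(ResultDescription).level
| where logLevel == "error"
| take 10
"""

response = client.query_workspace(
    workspace_id="<log-analytics-workspace-id>",
    query=query,
    timespan=timedelta(hours=1),
)
for table in response.tables:
    for row in table.rows:
        print(row)
```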

docs/azure_logs01.png (new binary file, 674 KiB)
docs/new-relic.md (new file, 15 lines)

@@ -0,0 +1,15 @@
# New Relic
The application is integrated via Docker directly into New Relic as the APM application "vbv-prod-azure":
https://one.newrelic.com/nr1-core/apm/overview/MTgwMTYwfEFQTXxBUFBMSUNBVElPTnwxMDQ5Njk0MDU0
In addition, the application logs can be viewed directly in New Relic, within the APM application:
https://one.newrelic.com/nr1-core/logger/logs-summary/MTgwMTYwfEFQTXxBUFBMSUNBVElPTnwxMDQ5Njk0MDU0
Here is a query that shows only the relevant logs that have an "event" attribute and no label == "security":
```
event:* -"label":"security"
```
![new_relic_logs01.png](new_relic_logs01.png)
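The "event" attribute that this filter relies on comes from the structured log events the backend emits with structlog; each event dict is serialized to JSON (see vbv_lernwelt/core/log_utils.py), so its keys become New Relic log attributes. A purely illustrative sketch of calls that would and would not match the query; the event names and the security label are made up.

```python
# Illustrative only; event names are made up. structlog's first positional
# argument becomes the "event" attribute in the serialized log line.
import structlog

logger = structlog.get_logger(__name__)

# Matches the query: has an "event" attribute and no security label.
logger.info("course_session_started", course_id=42)

# Excluded by -"label":"security".
logger.warning("login_failed", label="security")
```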

docs/new_relic_logs01.png (new binary file, 748 KiB)

@@ -8,7 +8,10 @@ import structlog
 from environs import Env
 from vbv_lernwelt.core.constants import DEFAULT_RICH_TEXT_FEATURES
-from vbv_lernwelt.core.utils import structlog_add_app_info
+from vbv_lernwelt.core.log_utils import (
+    structlog_add_app_info,
+    structlog_convert_to_json_for_new_relic,
+)
 SERVER_ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent.parent
 APPS_DIR = SERVER_ROOT_DIR / "vbv_lernwelt"
@@ -427,36 +430,16 @@ if IT_DJANGO_LOGGING_CONF == "IT_DJANGO_LOGGING_CONF_CONSOLE_COLOR":
         cache_logger_on_first_use=True,
     )
 else:
-    shared_processors = [
-        structlog.threadlocal.merge_threadlocal,
-        structlog.stdlib.add_log_level,
-        structlog.stdlib.add_logger_name,
-        structlog_add_app_info,
-        structlog.processors.TimeStamper(fmt="iso"),
-        structlog.processors.CallsiteParameterAdder(),
-    ]
     LOGGING = {
         "version": 1,
         "disable_existing_loggers": True,
         "formatters": {
             "json": {
-                "()": structlog.stdlib.ProcessorFormatter,
-                "processors": [
-                    structlog.stdlib.ProcessorFormatter.remove_processors_meta,
-                    structlog.processors.JSONRenderer(),
-                ],
-                "foreign_pre_chain": shared_processors,
+                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
+                "format": "%(asctime)s %(levelname)s %(process)d %(thread)d %(name)s %(lineno)d %(funcName)s %(message)s",
             },
         },
         "handlers": {
-            "file": {
-                "class": "concurrent_log_handler.ConcurrentRotatingFileHandler",
-                "filename": f"{SERVER_ROOT_DIR}/log/myservice.log",
-                "maxBytes": 1024 * 1024 * 100,
-                "backupCount": 50,
-                "formatter": "json",
-            },
             "console": {
                 "level": "DEBUG",
                 "class": "logging.StreamHandler",
@@ -465,30 +448,36 @@ else:
         },
         "loggers": {
             "": {
-                "handlers": ["console", "file"],
+                "handlers": ["console"],
                 "level": "INFO",
             },
             "django": {
-                "handlers": ["console", "file"],
+                "handlers": ["console"],
                 "level": "WARNING",
                 "propagate": False,
             },
             "vbv_lernwelt": {
-                "handlers": ["console", "file"],
+                "handlers": ["console"],
                 "level": "DEBUG",
                 "propagate": False,
             },
             "sentry_sdk": {
                 "level": "ERROR",
-                "handlers": ["console", "file"],
+                "handlers": ["console"],
                 "propagate": False,
             },
         },
     }
     structlog.configure(
-        processors=shared_processors
-        + [
+        processors=[
+            structlog.stdlib.filter_by_level,
+            structlog.threadlocal.merge_threadlocal,
+            structlog_add_app_info,
+            structlog.processors.StackInfoRenderer(),
+            structlog.processors.format_exc_info,
+            structlog.processors.UnicodeDecoder(),
+            structlog_convert_to_json_for_new_relic,
             structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
         ],
         context_class=dict,
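In short: in this branch structlog now serializes each event dict to a JSON string itself (structlog_convert_to_json_for_new_relic), hands it to stdlib logging via wrap_for_formatter, and the console handler's pythonjsonlogger formatter adds the outer fields, which lets New Relic parse nested attributes. A rough, purely illustrative sketch of what one call produces; the field values and ordering are assumptions, not captured output.

```python
# Illustrative only: approximate shape of a log line under the new config.
import structlog

log = structlog.get_logger("vbv_lernwelt.core")
log.info("user_enrolled", user_id=7, circle_id="abc")

# The structlog processors serialize the event dict to JSON; that string becomes
# the stdlib LogRecord message, which pythonjsonlogger wraps again, roughly:
# {"asctime": "...", "levelname": "INFO", "name": "vbv_lernwelt.core",
#  "message": "{\"event\": \"user_enrolled\", \"user_id\": 7, \"circle_id\": \"abc\", ...}"}
```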


@@ -141,17 +141,21 @@ urlpatterns = [
          name='file_upload_local'),
     # feedback
-    path(r'api/core/feedback/<str:course_session_id>/summary/', get_expert_feedbacks_for_course,
+    path(r'api/core/feedback/<str:course_session_id>/summary/',
+         get_expert_feedbacks_for_course,
          name='feedback_summary'),
-    path(r'api/core/feedback/<str:course_session_id>/<str:circle_id>/', get_feedback_for_circle,
+    path(r'api/core/feedback/<str:course_session_id>/<str:circle_id>/',
+         get_feedback_for_circle,
          name='feedback_for_circle'),
     # edoniq test
     path(r'api/core/edoniq-test/redirect/', get_edoniq_token_redirect,
          name='get_edoniq_token_redirect'),
-    path(r'api/core/edoniq-test/export-users/', export_students, name='edoniq_export_students'),
-    path(r'api/core/edoniq-test/export-trainers/', export_trainers, name='edoniq_export_trainers'),
+    path(r'api/core/edoniq-test/export-users/', export_students,
+         name='edoniq_export_students'),
+    path(r'api/core/edoniq-test/export-trainers/', export_trainers,
+         name='edoniq_export_trainers'),
     path(r'api/core/edoniq-test/export-users-trainers/', export_students_and_trainers,
          name='edoniq_export_students_and_trainers'),
@@ -183,16 +187,16 @@ urlpatterns = [
 ]
 urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
-if settings.DEBUG:
-    # Static file serving when using Gunicorn + Uvicorn for local web socket development
-    urlpatterns += staticfiles_urlpatterns()
-if settings.APP_ENVIRONMENT != 'production':
+if not settings.APP_ENVIRONMENT.startswith('prod'):
     urlpatterns += [
         re_path(r'api/core/cypressreset/$', cypress_reset_view,
                 name='cypress_reset_view'),
     ]
+if settings.DEBUG:
+    # Static file serving when using Gunicorn + Uvicorn for local web socket development
+    urlpatterns += staticfiles_urlpatterns()
 # fmt: on


@@ -1,8 +1,8 @@
 #
-# This file is autogenerated by pip-compile with python 3.10
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
 #
-#    pip-compile requirements-dev.in
+#    pip-compile --output-file=requirements-dev.txt requirements-dev.in
 #
 aniso8601==9.0.1
     # via graphene
@@ -313,6 +313,8 @@ mypy-extensions==1.0.0
     #   black
     #   mypy
     #   typing-inspect
+newrelic==8.11.0
+    # via -r requirements.in
 nodeenv==1.8.0
     # via pre-commit
 openpyxl==3.1.2


@@ -50,3 +50,4 @@ azure-identity
 boto3
 openpyxl
+newrelic


@@ -1,8 +1,8 @@
 #
-# This file is autogenerated by pip-compile with python 3.10
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
 #
-#    pip-compile requirements.in
+#    pip-compile --output-file=requirements.txt requirements.in
 #
 aniso8601==9.0.1
     # via graphene
@@ -196,6 +196,8 @@ msal==1.23.0
     #   msal-extensions
 msal-extensions==1.0.0
     # via azure-identity
+newrelic==8.11.0
+    # via -r requirements.in
 openpyxl==3.1.2
     # via
     #   -r requirements.in


@@ -0,0 +1,26 @@
import logging

from django.conf import settings
from structlog.typing import EventDict

from vbv_lernwelt.core.utils import safe_json_dumps


def structlog_convert_to_json_for_new_relic(
    _: logging.Logger, __: str, event_dict: EventDict
) -> str:
    """
    The *event_dict* is serialized to a json string, so that in New Relic logs
    the nested keys will show up as attributes.
    """
    return safe_json_dumps(event_dict)


def structlog_add_app_info(
    _: logging.Logger, __: str, event_dict: EventDict
) -> EventDict:
    event_dict["django_app"] = "vbv_lernwelt"
    event_dict["app_environment"] = settings.APP_ENVIRONMENT
    event_dict["django_app_dev_mode"] = f"vbv_lernwelt_{settings.APP_ENVIRONMENT}"
    return event_dict
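A tiny illustrative usage of the two processors above; the example values are made up, and structlog_add_app_info only works with configured Django settings.

```python
# Illustrative usage only; example values are made up.
import logging

from vbv_lernwelt.core.log_utils import (
    structlog_add_app_info,
    structlog_convert_to_json_for_new_relic,
)

event = {"event": "user_enrolled", "payload": {"user_id": 7}}

# Adds django_app, app_environment and django_app_dev_mode (needs Django settings).
event = structlog_add_app_info(logging.getLogger(), "info", event)

# Serializes the whole dict, so New Relic shows e.g. payload.user_id as an attribute.
print(structlog_convert_to_json_for_new_relic(logging.getLogger(), "info", event))
```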


@@ -1,34 +1,19 @@
-import logging
+import json
 import re
-import structlog
-from django.conf import settings
 from rest_framework.throttling import UserRateThrottle
-from structlog.types import EventDict
-def structlog_add_app_info(
-    logger: logging.Logger, method_name: str, event_dict: EventDict
-) -> EventDict:
-    event_dict["django_app"] = "vbv_lernwelt"
-    event_dict["APP_ENVIRONMENT"] = settings.APP_ENVIRONMENT
-    event_dict["django_app_dev_mode"] = f"vbv_lernwelt_{settings.APP_ENVIRONMENT}"
-    return event_dict
-def structlog_inject_context_dict(test, level, event_dict):
-    """
-    Add the structlog context dict to log events generated by the stdlib logging library.
-    """
-    context_class = structlog.get_config().get("context_class")
-    if context_class:
-        for key, value in context_class().items():
-            if key not in event_dict:
-                event_dict[key] = value
-    return event_dict
+class FailSafeJSONEncoder(json.JSONEncoder):
+    def default(self, obj):
+        try:
+            return super(FailSafeJSONEncoder, self).default(obj)
+        except Exception:
+            return str(obj)
+def safe_json_dumps(data, **kwargs):
+    return json.dumps(data, cls=FailSafeJSONEncoder, **kwargs)
 class HourUserRateThrottle(UserRateThrottle):
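The point of FailSafeJSONEncoder is that serializing a log event can never raise: anything the json module cannot encode is rendered with str() instead. A small illustrative check with made-up values.

```python
# Illustrative check of the fail-safe behaviour; example values only.
import uuid
from datetime import datetime

from vbv_lernwelt.core.utils import safe_json_dumps

payload = {
    "event": "export_finished",
    "run_id": uuid.uuid4(),         # json.dumps() alone would raise TypeError here
    "finished_at": datetime.now(),  # and here
}

# Falls back to str() for the non-serializable values instead of raising.
print(safe_json_dumps(payload))
```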


@@ -133,7 +133,7 @@ def check_rate_limit(request):
 @authentication_classes((authentication.SessionAuthentication,))
 @permission_classes((IsAdminUser,))
 def cypress_reset_view(request):
-    if settings.APP_ENVIRONMENT != "production":
+    if not settings.APP_ENVIRONMENT.startswith("prod"):
         call_command("cypress_reset")
     return HttpResponseRedirect("/server/admin/")
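Note the semantic widening of the guard: the old check only blocked the exact environment name "production", while the new check blocks any environment whose name starts with "prod". A tiny illustrative comparison; the concrete environment names other than "production" are assumptions.

```python
# Illustration of the changed guard; environment names besides "production"
# are assumptions, not values taken from the settings.
for env in ["local", "develop", "production", "prod-azure"]:
    old_allows_reset = env != "production"          # old condition
    new_allows_reset = not env.startswith("prod")   # new condition
    print(env, old_allows_reset, new_allows_reset)

# For an environment like "prod-azure", the old check would still have run the
# cypress reset; the new startswith("prod") check blocks it.
```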


@@ -6,6 +6,7 @@
   "ignore hash 5": "1LhwZ0DvP4cGBgbBdCfaBQV7eiaOc4jWKdzO9WEXLFT7AaqBN6jqd0uyaZeAZ19K",
   "ignore hash 6": "A035C8C19219BA821ECEA86B64E628F8D684696D",
   "ignore hash 7": "96334b4eb6a7ae5b0d86abd7febcbcc67323bb94",
+  "ignore hash 8": "MTgwMTYwfEFQTXxBUFBMSUNBVElPTnwxMDQ5Njk0MDU0",
   "json base64 content": "regex:\"content\": \"",
   "img base64 content": "regex:data:image/png;base64,.*",
   "sentry url": "https://2df6096a4fd94bd6b4802124d10e4b8d@o8544.ingest.sentry.io/4504157846372352",