diff --git a/dash-apm-python/index.json b/dash-apm-python/index.json
index 5c9d1bb..1b0e423 100644
--- a/dash-apm-python/index.json
+++ b/dash-apm-python/index.json
@@ -21,22 +21,22 @@
         "text": "step_4.md"
       },
       {
-        "title": "Introducing errors",
-        "text": "step_5.md",
-        "courseData": "step_5_background.sh"
+        "title": "Enable trace and logs",
+        "text": "step_5.md"
       },
       {
-        "title": "Introducing latencies",
+        "title": "Introducing errors",
         "text": "step_6.md",
         "courseData": "step_6_background.sh"
       },
       {
-        "title": "Enable trace and logs",
+        "title": "Adding manual spans",
         "text": "step_7.md"
       },
       {
-        "title": "Adding manual spans",
-        "text": "step_8.md"
+        "title": "Introducing latencies",
+        "text": "step_8.md",
+        "courseData": "step_8_background.sh"
       },
       {
         "title": "Enabling monitors",
diff --git a/dash-apm-python/step_5.md b/dash-apm-python/step_5.md
index d759918..cdd7999 100644
--- a/dash-apm-python/step_5.md
+++ b/dash-apm-python/step_5.md
@@ -1,8 +1,106 @@
-In this phase we are going to introduce an exception into our application
-to see how we can track it down.
+# Enable Datadog logs agent
 
-`restart-services`{{execute interrupt}} to continue.
+Add the following environment variables to the `agent` service in `docker-compose.yml`.
 
-Open the `frontend` service page to continue.
+`DD_LOGS_ENABLED=true`{{copy}}
+`DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL=true`{{copy}}
 
-https://app.datadoghq.com/apm/service/frontend
+Our service should look like:
+
+```yaml
+agent:
+  environment:
+    - DD_API_KEY
+    - DD_APM_ENABLED=true
+    - DD_TAGS='env:apm-workshop'
+    - DD_LOGS_ENABLED=true
+    - DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL=true
+```{{copy}}
+
+# Enable trace ID injection into logs
+
+Add the following environment variable to the `frontend`, `node`, `pumps`,
+and `sensors` services in `docker-compose.yml`.
+
+`DD_LOGS_INTEGRATION=true`{{copy}}
+
+Our services should look like:
+
+```yaml
+frontend:
+  env_file: ".env"
+  environment:
+    - DATADOG_SERVICE_NAME=frontend
+    - DATADOG_TRACE_AGENT_HOSTNAME=agent
+    - DD_ANALYTICS_ENABLED=true
+    - DD_LOGS_INTEGRATION=true
+```{{copy}}
+
+```yaml
+node:
+  env_file: ".env"
+  environment:
+    - DD_SERVICE_NAME=users-api
+    - DD_TRACE_AGENT_HOSTNAME=agent
+    - DD_ANALYTICS_ENABLED=true
+    - DD_LOGS_INTEGRATION=true
+```{{copy}}
+
+```yaml
+pumps:
+  env_file: ".env"
+  environment:
+    - FLASK_APP=pumps.py
+    - FLASK_DEBUG=1
+    - POSTGRES_PASSWORD=postgres
+    - POSTGRES_USER=postgres
+    - DATADOG_SERVICE_NAME=pumps-service
+    - DATADOG_TRACE_AGENT_HOSTNAME=agent
+    - DD_ANALYTICS_ENABLED=true
+    - DD_LOGS_INTEGRATION=true
+```{{copy}}
+
+```yaml
+sensors:
+  env_file: ".env"
+  environment:
+    - FLASK_APP=sensors.py
+    - FLASK_DEBUG=1
+    - POSTGRES_PASSWORD=postgres
+    - POSTGRES_USER=postgres
+    - DATADOG_SERVICE_NAME=sensors-api
+    - DATADOG_TRACE_AGENT_HOSTNAME=agent
+    - DD_ANALYTICS_ENABLED=true
+    - DD_LOGS_INTEGRATION=true
+```{{copy}}
+
+
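+For reference, here is a minimal sketch of a Python log format that surfaces
+the injected trace ids. It is illustrative only and not part of the workshop
+services: it assumes `ddtrace` log injection is active in the service code,
+which is what the `DD_LOGS_INTEGRATION` flag above is expected to toggle.
+
+```python
+import logging
+
+from ddtrace import patch
+
+# Have ddtrace add dd.trace_id / dd.span_id attributes to every log record.
+patch(logging=True)
+
+FORMAT = (
+    "%(asctime)s %(levelname)s [%(name)s] "
+    "[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s] %(message)s"
+)
+logging.basicConfig(format=FORMAT, level=logging.INFO)
+
+logging.getLogger(__name__).info("pump status requested")
+```
+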
+Afterwards, restart the Docker services:
+
+`restart-services`{{execute interrupt}}
+
+Finally, open the logs dashboard:
+
+https://app.datadoghq.com/logs
diff --git a/dash-apm-python/step_5_background.sh b/dash-apm-python/step_5_background.sh
deleted file mode 100644
index e98a1ca..0000000
--- a/dash-apm-python/step_5_background.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-echo "WORKSHOP_ADD_ERRORS=true" >> /tracing-workshop/.env
diff --git a/dash-apm-python/step_6.md b/dash-apm-python/step_6.md
index c7b16a2..d759918 100644
--- a/dash-apm-python/step_6.md
+++ b/dash-apm-python/step_6.md
@@ -1,4 +1,4 @@
-In this phase we are going to introduce an latency into our application
+In this phase we are going to introduce an exception into our application
 to see how we can track it down.
 
 `restart-services`{{execute interrupt}} to continue.
diff --git a/dash-apm-python/step_6_background.sh b/dash-apm-python/step_6_background.sh
index 4c394c9..e98a1ca 100644
--- a/dash-apm-python/step_6_background.sh
+++ b/dash-apm-python/step_6_background.sh
@@ -1,3 +1,3 @@
 #!/usr/bin/env bash
 
-echo "WORKSHOP_ADD_LATENCY=true" >> /tracing-workshop/.env
+echo "WORKSHOP_ADD_ERRORS=true" >> /tracing-workshop/.env
diff --git a/dash-apm-python/step_7.md b/dash-apm-python/step_7.md
index cdd7999..e69de29 100644
--- a/dash-apm-python/step_7.md
+++ b/dash-apm-python/step_7.md
@@ -1,84 +0,0 @@
-# Enable Datadog logs agent
-
-Add the following environment variables to the `agent` service in `docker-compose.yml`.
-
-`DD_LOGS_ENABLED=true`{{copy}}
-`DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL=true`{{copy}}
-
-Our service should look like:
-
-```yaml
-agent:
-  environment:
-    - DD_API_KEY
-    - DD_APM_ENABLED=true
-    - DD_TAGS='env:apm-workshop'
-    - DD_LOGS_ENABLED=true
-    - DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL=true
-```{{copy}}
-
-# Enable trace id injection into logs
-
-Add the following environment variable to the `frontend`, `node`, `pumps`,
-and `sensors` services in `docker-compose.yml`.
-
-`DD_LOGS_INTEGRATION=true`{{copy}}
-
-Our services should look like:
-
-```yaml
-frontend:
-  env_file: ".env"
-  environment:
-    - DATADOG_SERVICE_NAME=frontend
-    - DATADOG_TRACE_AGENT_HOSTNAME=agent
-    - DD_ANALYTICS_ENABLED=true
-    - DD_LOGS_INTEGRATION=true
-```{{copy}}
-
-``` yaml
-node:
-  env_file: ".env"
-  environment:
-    - DD_SERVICE_NAME=users-api
-    - DD_TRACE_AGENT_HOSTNAME=agent
-    - DD_ANALYTICS_ENABLED=true
-    - DD_LOGS_INTEGRATION=true
-```{{copy}}
-
-``` yaml
-pumps:
-  env_file: ".env"
-  environment:
-    - FLASK_APP=pumps.py
-    - FLASK_DEBUG=1
-    - POSTGRES_PASSWORD=postgres
-    - POSTGRES_USER=postgres
-    - DATADOG_SERVICE_NAME=pumps-service
-    - DATADOG_TRACE_AGENT_HOSTNAME=agent
-    - DD_ANALYTICS_ENABLED=true
-    - DD_LOGS_INTEGRATION=true
-```{{copy}}
-
-``` yaml
-sensors:
-  env_file: ".env"
-  environment:
-    - FLASK_APP=sensors.py
-    - FLASK_DEBUG=1
-    - POSTGRES_PASSWORD=postgres
-    - POSTGRES_USER=postgres
-    - DATADOG_SERVICE_NAME=sensors-api
-    - DATADOG_TRACE_AGENT_HOSTNAME=agent
-    - DD_ANALYTICS_ENABLED=true
-    - DD_LOGS_INTEGRATION=true
-```{{copy}}
-
-
-Afterwards restart docker services:
-
-`restart-services`{{execute interrupt}}
-
-Finally, open logs dashboard:
-
-https://app.datadoghq.com/logs
diff --git a/dash-apm-python/step_8.md b/dash-apm-python/step_8.md
index e69de29..c7b16a2 100644
--- a/dash-apm-python/step_8.md
+++ b/dash-apm-python/step_8.md
@@ -0,0 +1,8 @@
+In this phase we are going to introduce latency into our application
+to see how we can track it down.
+
+`restart-services`{{execute interrupt}} to continue.
+
+Open the `frontend` service page to continue.
+
+https://app.datadoghq.com/apm/service/frontend
diff --git a/dash-apm-python/step_8_background.sh b/dash-apm-python/step_8_background.sh
new file mode 100644
index 0000000..4c394c9
--- /dev/null
+++ b/dash-apm-python/step_8_background.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+echo "WORKSHOP_ADD_LATENCY=true" >> /tracing-workshop/.env