This commit is contained in:
michaelcade 2022-03-28 10:32:52 +01:00
parent 719d23e3c4
commit ac9ddf3ffd
18 changed files with 553 additions and 5 deletions

Binary file not shown.

After

Width:  |  Height:  |  Size: 850 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 364 KiB

View File

@ -0,0 +1,13 @@
# Tail Docker's per-container JSON log files mounted from the host
# (see the /var/lib/docker/containers volume in docker-compose).
# Every event is tagged docker.log for routing in elastic-fluent.conf.
<source>
@type tail
format json
read_from_head true
tag docker.log
path /fluentd/log/containers/*/*-json.log
pos_file /tmp/container-logs.pos
</source>
# Debug sink: uncomment to also copy docker events to a local file in /output.
# <match docker.log>
# @type file
# path /output/docker.log
# </match>

View File

@ -0,0 +1,26 @@
# Output stage: route each tagged stream to the "elasticsearch" service
# (resolved via the compose network) on port 9200, one index per source.
# where to send http logs
<match http-*.log>
@type elasticsearch
host elasticsearch
port 9200
index_name fluentd-http
# NOTE(review): type_name (mapping types) is deprecated in Elasticsearch 7 —
# confirm the target ES version still accepts it.
type_name fluentd
</match>
#where to send file logs
<match file-myapp.log>
@type elasticsearch
host elasticsearch
port 9200
index_name fluentd-file
type_name fluentd
</match>
#where to send docker logs
<match docker.log>
@type elasticsearch
host elasticsearch
port 9200
index_name fluentd-docker
type_name fluentd
</match>

View File

@ -0,0 +1,20 @@
# Tail the example application log file (written by the file-myapp container;
# ./file is mounted at /fluentd/log/files/ in docker-compose).
<source>
@type tail
format json
read_from_head true
tag file-myapp.log
path /fluentd/log/files/example-log.log
pos_file /tmp/example-log.log.pos
</source>
# Enrich every event with the collector's hostname before it is routed.
<filter file-myapp.log>
@type record_transformer
<record>
host_param "#{Socket.gethostname}"
</record>
</filter>
# Debug sink: uncomment to also copy these events to a local file in /output.
# <match file-myapp.log>
# @type file
# path /output/file-myapp.log
# </match>

View File

@ -0,0 +1,16 @@
# Main fluentd entry point: composes the pipeline from the per-source
# config fragments in this directory. Order matters only for the final
# match stage (elastic-fluent.conf), which must come after the sources.
################################################################
# This source reads tail of a file
@include file-fluent.conf
################################################################
# This source gets incoming logs over HTTP
@include http-fluent.conf
################################################################
# This source gets all logs from local docker host
@include containers-fluent.conf
################################################################
# Send all logs to elastic search
@include elastic-fluent.conf

View File

@ -0,0 +1,19 @@
# Accept events over HTTP on port 9880; the request path becomes the tag
# (e.g. POST /http-myapp.log is tagged http-myapp.log — see http/app.sh).
<source>
@type http
port 9880
bind 0.0.0.0
body_size_limit 32m
keepalive_timeout 10s
</source>
# Enrich every incoming HTTP event with the collector's hostname.
<filter http-*.log>
@type record_transformer
<record>
host_param "#{Socket.gethostname}"
</record>
</filter>
# Debug sink: uncomment to also copy HTTP events to a local file in /output.
# <match http-*.log>
# @type file
# path /output/http.log
# </match>

View File

@ -0,0 +1,49 @@
version: "3"
services:
  # Log collector: custom image built from the Dockerfile in this directory
  # (fluentd + elasticsearch output plugin).
  fluentd:
    container_name: fluentd
    user: root # root is required to read the host's container log files
    build:
      context: .
    image: fluentd
    volumes:
      - /var/lib/docker/containers:/fluentd/log/containers # Example: Reading docker logs
      - ./file:/fluentd/log/files/ # Example: Reading logs from a file
      - ./configurations:/fluentd/etc/
      - ./logs:/output/ # Example: Fluentd will collect logs and store it here for demo
    logging:
      # NOTE(review): "local" driver presumably keeps fluentd's own output out of
      # the json-file logs it tails under /var/lib/docker/containers — confirm.
      driver: "local"
  # This app sends logs to Fluentd via HTTP
  http-myapp:
    container_name: http-myapp
    image: alpine
    volumes:
      - ./http:/app
    command: ["/bin/sh", "-c", "apk add --no-cache curl && chmod +x /app/app.sh && ./app/app.sh"]
  # This app writes logs to a local file
  file-myapp:
    container_name: file-myapp
    image: alpine
    volumes:
      - ./file:/app
    command: ["/bin/sh", "-c", "chmod +x /app/app.sh && ./app/app.sh"]
  elasticsearch: # port 9200
    image: elasticsearch:7.9.1
    container_name: elasticsearch
    environment:
      - node.name=elasticsearch
      - cluster.initial_master_nodes=elasticsearch
      - bootstrap.memory_lock=true # lock the JVM heap in RAM (needs the memlock ulimit below)
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
  kibana:
    image: kibana:7.9.1
    container_name: kibana
    ports:
      - "5601:5601"
    environment:
      ELASTICSEARCH_URL: http://elasticsearch:9200
      ELASTICSEARCH_HOSTS: http://elasticsearch:9200

View File

@ -0,0 +1,4 @@
# Fluentd image extended with the Elasticsearch output plugin
# (required by the <match> blocks using @type elasticsearch).
FROM fluent/fluentd:v1.11-debian
# gem install needs root; the base image runs as the fluent user by default.
USER root
RUN gem install fluent-plugin-elasticsearch

View File

@ -0,0 +1,7 @@
#!/bin/sh
# Demo log producer: append one JSON event to /app/example-log.log
# every 5 seconds, forever. Fluentd tails this file (file-fluent.conf).
while :; do
    echo "Writing log to a file"
    echo '{"app":"file-myapp"}' >> /app/example-log.log
    sleep 5
done

View File

@ -0,0 +1,244 @@
This is a log
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}
{"app":"file-myapp"}

View File

@ -0,0 +1,7 @@
#!/bin/sh
# Demo log producer: POST one JSON event to fluentd's HTTP input
# every 5 seconds, forever. The request path becomes the fluentd tag.
while :; do
    echo "Sending logs to FluentD"
    curl -X POST -d 'json={"foo":"bar"}' http://fluentd:9880/http-myapp.log
    sleep 5
done

View File

@ -0,0 +1,2 @@
fluentd will collect all container logs and write them
in this folder

View File

@ -0,0 +1,28 @@
# Introduction to Fluentd
## Collecting logs from files
To read logs from a file, we need an application that writes logs to a file. <br/>
Let's start one:
```
cd monitoring\logging\fluentd\introduction\
docker-compose up -d file-myapp
```
To collect the logs, let's start Fluentd:
```
docker-compose up -d fluentd
```
## Collecting logs over HTTP (incoming)
```
cd monitoring\logging\fluentd\introduction\
docker-compose up -d http-myapp
```

View File

@ -1,13 +1,70 @@
## Fluentd
### Fluentd
Another data collector that I wanted to explore as part of this observability section was [Fluentd](https://docs.fluentd.org/). An Open-Source unified logging layer.
Fluentd treats logs as JSON
[Installing Fluentd](https://docs.fluentd.org/quickstart#step-1-installing-fluentd)
## Collecting logs from files
We all have applications that write to a `.log` file format. Fluentd has the ability to read logs from a file with the configuration we have set; we will cover that shortly. Below you can see me bringing up the two containers, `docker-compose up -d file-myapp` and then `docker-compose up -d fluentd`
![](Images/Day81_Monitoring1.png)
If you watch [Introduction to Fluentd: Collect logs and send almost anywhere](https://www.youtube.com/watch?v=Gp0-7oVOtPw&t=447s) from "That DevOps Guy" (some amazing content that has been linked throughout this whole challenge), you will see that I am using his example here.
With the container `file-myapp` there is a script in there to add to an example log file. You can see this happening below.
![](Images/Day81_Monitoring2.png)
The script that we have running inside of our `file-myapp` container looks like this:
```
#!/bin/sh
while true
do
echo "Writing log to a file"
echo '{"app":"file-myapp"}' >> /app/example-log.log
sleep 5
done
```
We are using the [tail plugin](https://docs.fluentd.org/input/tail) within fluentd which allows us to read those events from the tail of text files, similar to the `tail -F` command.
We can also use the [HTTP plugin](https://docs.fluentd.org/input/http) which allows us to send events through HTTP requests. This is what we will see with the `http-myapp` container in the repository. This container also runs a similar shell script to generate logs.
```
#!/bin/sh
while true
do
echo "Sending logs to FluentD"
curl -X POST -d 'json={"foo":"bar"}' http://fluentd:9880/http-myapp.log
sleep 5
done
```
### Container logging
We are going to be using docker-compose to bring up a small demo environment that can demonstrate fluentd. The docker compose file is going to bring up the following containers.
container_name: fluentd
container_name: http-myapp
container_name: file-myapp
container_name: elasticsearch
container_name: kibana

View File

@ -4,3 +4,25 @@ In the previous section, we spoke about ELK Stack, which uses Logstash as the lo
If EFK stack is not enough we could take a look at APM (Application Performance Monitoring), SigNoz, which uses OpenTelemetry - a vendor-agnostic instrumentation library for generating telemetry data. OpenTelemetry is a project under Cloud Native Computing Foundation and is becoming the industry standard for creating portable telemetry data.
https://www.youtube.com/watch?v=idDu_jXqf4E&t=10s
## Resources
- [Understanding Logging: Containers & Microservices](https://www.youtube.com/watch?v=MMVdkzeQ848)
- [The Importance of Monitoring in DevOps](https://www.devopsonline.co.uk/the-importance-of-monitoring-in-devops/)
- [Understanding Continuous Monitoring in DevOps?](https://medium.com/devopscurry/understanding-continuous-monitoring-in-devops-f6695b004e3b)
- [DevOps Monitoring Tools](https://www.youtube.com/watch?v=Zu53QQuYqJ0)
- [Top 5 - DevOps Monitoring Tools](https://www.youtube.com/watch?v=4t71iv_9t_4)
- [How Prometheus Monitoring works](https://www.youtube.com/watch?v=h4Sl21AKiDg)
- [Introduction to Prometheus monitoring](https://www.youtube.com/watch?v=5o37CGlNLr8)
- [Promql cheat sheet with examples](https://www.containiq.com/post/promql-cheat-sheet-with-examples)
- [Log Management for DevOps | Manage application, server, and cloud logs with Site24x7](https://www.youtube.com/watch?v=J0csO_Shsj0)
- [Log Management what DevOps need to know](https://devops.com/log-management-what-devops-teams-need-to-know/)
- [What is ELK Stack?](https://www.youtube.com/watch?v=4X0WLg05ASw)
- [Fluentd simply explained](https://www.youtube.com/watch?v=5ofsNyHZwWE&t=14s)
See you on [Day 83](day83.md)

View File

@ -1,3 +1,37 @@
## Data Visualisation
## Data Visualisation - Grafana
https://devops.com/making-data-work-data-visualization-techniques/
A common tool that you will find cross-platform is Grafana, there are no doubt others but Grafana is a tool that I have seen spanning the virtual, cloud and cloud-native platforms so I wanted to cover this here in this section.
https://devops.com/making-data-work-data-visualization-techniques/
https://www.youtube.com/watch?v=YDtuwlNTzRc
### Observability
I think we should also finish this section off with some discussion about Observability (metrics, logging, tracing and more recently you could also add profiling to the pillars of observability)
https://www.youtube.com/watch?v=orsxOxQNzDQ
https://www.youtube.com/watch?v=ZVKrN1RLetI
https://www.youtube.com/watch?v=SoZZzB-yTOk
## Resources
- [Understanding Logging: Containers & Microservices](https://www.youtube.com/watch?v=MMVdkzeQ848)
- [The Importance of Monitoring in DevOps](https://www.devopsonline.co.uk/the-importance-of-monitoring-in-devops/)
- [Understanding Continuous Monitoring in DevOps?](https://medium.com/devopscurry/understanding-continuous-monitoring-in-devops-f6695b004e3b)
- [DevOps Monitoring Tools](https://www.youtube.com/watch?v=Zu53QQuYqJ0)
- [Top 5 - DevOps Monitoring Tools](https://www.youtube.com/watch?v=4t71iv_9t_4)
- [How Prometheus Monitoring works](https://www.youtube.com/watch?v=h4Sl21AKiDg)
- [Introduction to Prometheus monitoring](https://www.youtube.com/watch?v=5o37CGlNLr8)
- [Promql cheat sheet with examples](https://www.containiq.com/post/promql-cheat-sheet-with-examples)
- [Log Management for DevOps | Manage application, server, and cloud logs with Site24x7](https://www.youtube.com/watch?v=J0csO_Shsj0)
- [Log Management what DevOps need to know](https://devops.com/log-management-what-devops-teams-need-to-know/)
- [What is ELK Stack?](https://www.youtube.com/watch?v=4X0WLg05ASw)
- [Fluentd simply explained](https://www.youtube.com/watch?v=5ofsNyHZwWE&t=14s)
See you on [Day 84](day84.md)

View File

@ -128,7 +128,7 @@ This will not cover all things DevOps but it will cover the areas that I feel wi
- [✔️] 📈 80 > [ELK Stack](Days/day80.md)
- [🚧] 📈 81 > [Fluentd](Days/day81.md)
- [] 📈 82 > [EFK Stack](Days/day82.md)
- [] 📈 83 > [Data Visualisation](Days/day83.md)
- [] 📈 83 > [Data Visualisation - Grafana](Days/day83.md)
### Store & Protect Your Data