-
1764991180000000 8.221.140.90 - - [06/Dec/2025:03:19:40 +0000] "GET / HTTP/1.1" 401 30 "-" "curl/7.64.1"
event body
{ "appname": "supabase-kong", "event_message": "8.221.140.90 - - [06/Dec/2025:03:19:40 +0000] \"GET / HTTP/1.1\" 401 30 \"-\" \"curl/7.64.1\"", "id": "d36a2448-17f9-49d5-ae63-3e1ec440d1da", "metadata": { "request": { "headers": { "cf_connecting_ip": "8.221.140.90", "user_agent": "curl/7.64.1" }, "method": "GET", "path": "/", "protocol": "HTTP/1.1" }, "response": { "status_code": 401 } }, "project": "default", "timestamp": 1764991180000000 } -
1764991622000000 172.18.0.2 - - [06/Dec/2025:03:27:02 +0000] "POST /auth/v1/token?grant_type=refresh_token HTTP/1.1" 200 1724 "-" "node"
event body
{ "appname": "supabase-kong", "event_message": "172.18.0.2 - - [06/Dec/2025:03:27:02 +0000] \"POST /auth/v1/token?grant_type=refresh_token HTTP/1.1\" 200 1724 \"-\" \"node\"", "id": "83205c85-c49a-4cc3-b473-74bbabe45de4", "metadata": { "request": { "headers": { "cf_connecting_ip": "172.18.0.2", "user_agent": "node" }, "method": "POST", "path": "/auth/v1/token?grant_type=refresh_token", "protocol": "HTTP/1.1" }, "response": { "status_code": 200 } }, "project": "default", "timestamp": 1764991622000000 } -
1764991801000000 172.18.0.8 - - [06/Dec/2025:03:30:01 +0000] "POST /analytics/v1/api/logs?source_name=postgres.logs HTTP/1.1" 200 41 "-" "Vector/0.28.1 (aarch64-unknown-linux-musl ff15924 2023-03-06)"
event body
{ "appname": "supabase-kong", "event_message": "172.18.0.8 - - [06/Dec/2025:03:30:01 +0000] \"POST /analytics/v1/api/logs?source_name=postgres.logs HTTP/1.1\" 200 41 \"-\" \"Vector/0.28.1 (aarch64-unknown-linux-musl ff15924 2023-03-06)\"", "id": "d7b07c3d-fdf2-42a0-a550-de7bc9238b40", "metadata": { "request": { "headers": { "cf_connecting_ip": "172.18.0.8", "user_agent": "Vector/0.28.1 (aarch64-unknown-linux-musl ff15924 2023-03-06)" }, "method": "POST", "path": "/analytics/v1/api/logs?source_name=postgres.logs", "protocol": "HTTP/1.1" }, "response": { "status_code": 200 } }, "project": "default", "timestamp": 1764991801000000 } -
1764992047000000 198.235.24.42 - - [06/Dec/2025:03:34:07 +0000] "GET / HTTP/1.1" 401 30 "-" "Hello from Palo Alto Networks, find out more about our scans in https://docs-cortex.paloaltonetworks.com/r/1/Cortex-Xpanse/Scanning-activity"
event body
{ "appname": "supabase-kong", "event_message": "198.235.24.42 - - [06/Dec/2025:03:34:07 +0000] \"GET / HTTP/1.1\" 401 30 \"-\" \"Hello from Palo Alto Networks, find out more about our scans in https://docs-cortex.paloaltonetworks.com/r/1/Cortex-Xpanse/Scanning-activity\"", "id": "877c49b6-b634-4147-9cd2-2120638b9146", "metadata": { "request": { "headers": { "cf_connecting_ip": "198.235.24.42", "user_agent": "Hello from Palo Alto Networks, find out more about our scans in https://docs-cortex.paloaltonetworks.com/r/1/Cortex-Xpanse/Scanning-activity" }, "method": "GET", "path": "/", "protocol": "HTTP/1.1" }, "response": { "status_code": 401 } }, "project": "default", "timestamp": 1764992047000000 }
Send Logs to this Source
Source ID
You'll need this source ID for some integrations or libraries.
8822e1d3-844e-4e66-9364-85ede7addabe
If you're hosted on Vercel setup our Vercel integration!
Install the Vercel integration
Gigalixir
Install the Gigalixir command line tool, and navigate to your project directory.
gigalixir drains:add "http://localhost:4000/logs/logplex?api_key=your-super-secret-and-long-logflare-key-public&source=8822e1d3-844e-4e66-9364-85ede7addabe"
Cloudflare App
Already on Cloudflare? Install the Cloudflare app and start sending logs now.
Heroku
Add our log drain with a simple command.
heroku drains:add "http://localhost:4000/logs/logplex?api_key=your-super-secret-and-long-logflare-key-public&source=8822e1d3-844e-4e66-9364-85ede7addabe"
Elixir Logger
Using Elixir? Use our Logger backend to send your structured logs.
Set up the Logger backend
Elixir Agent
Watch log files on a server with our Elixir agent.
Install the agent
JavaScript
Use our official Pino transport to send log events from your JavaScript project.
Set up the Pino transport
GitHub Webhook
Set your Github webhook to this Logflare endpoint and we'll ingest Github webhooks for you. This endpoint drops all keys ending in _url so it keeps your Github payloads in check.
http://localhost:4000/logs/github?api_key=your-super-secret-and-long-logflare-key-public&source=8822e1d3-844e-4e66-9364-85ede7addabe
Github Action
Use our Github Action (thanks @gr2m) to easily log events coming from your repositories.
Set up the GitHub Action
Fluent Bit
Watch log files on a server with this Fluent Bit output config.
[INPUT]
Name tail
Path /var/log/syslog
[OUTPUT]
Name http
Match *
tls On
Host api.logflare.app
Port 443
URI /logs/json?api_key=your-super-secret-and-long-logflare-key-public&source=8822e1d3-844e-4e66-9364-85ede7addabe
Format json
Retry_Limit 5
json_date_format iso8601
json_date_key timestamp
Generic Webhook
Use the generic JSON ingest endpoint to generate log events from an external webhook.
e.g. you can set a Github webhook to send events to:
http://localhost:4000/logs/json?api_key=your-super-secret-and-long-logflare-key-public&source=8822e1d3-844e-4e66-9364-85ede7addabe
Or send generic JSON events yourself.
curl -X "POST" "http://localhost:4000/logs/json?source=8822e1d3-844e-4e66-9364-85ede7addabe" \
-H 'Content-Type: application/json; charset=utf-8' \
-H 'X-API-KEY: your-super-secret-and-long-logflare-key-public' \
-d $'[
{
"yellow": true,
"tags": [
"popular, tropical, organic"
],
"store": {
"state": "AZ",
"city": "Phoenix",
"zip": 85016,
"address": "123 W Main St"
},
"type": "fruit",
"name": "banana",
"qty": 12
}
]'
Custom API Request
Send logs via an HTTP request. This request body payload lets you send over a human readable event message in
the message field.
curl -X "POST" "http://localhost:4000/logs?source=8822e1d3-844e-4e66-9364-85ede7addabe" \
-H 'Content-Type: application/json' \
-H 'X-API-KEY: your-super-secret-and-long-logflare-key-public' \
-d $'{
"event_message": "This is another log message.",
"metadata": {
"ip_address": "100.100.100.100",
"request_method": "POST",
"custom_user_data": {
"vip": true,
"id": 38,
"login_count": 154,
"company": "Apple",
"address": {
"zip": "11111",
"st": "NY",
"street": "123 W Main St",
"city": "New York"
}
},
"datacenter": "aws",
"request_headers": {
"connection": "close",
"user_agent": "chrome"
}
}
}'
Custom Cloudflare Worker
Customize the Cloudflare worker using the template below.
// Build a random identifier of `length` characters drawn from an
// uppercase-alphanumeric alphabet. Note the alphabet deliberately
// omits the letter "O" to avoid visual confusion with "0".
const makeid = length => {
  const alphabet = "ABCDEFGHIJKLMNPQRSTUVWXYZ0123456789"
  let id = ""
  for (let n = 0; n < length; n += 1) {
    const pick = Math.floor(Math.random() * alphabet.length)
    id += alphabet.charAt(pick)
  }
  return id
}
// Flatten an iterable of [name, value] header pairs (e.g. a Headers
// object) into a plain object, rewriting "-" to "_" in each name so
// the keys are safe for downstream structured-log ingestion.
const buildMetadataFromHeaders = headers => {
  const metadata = {}
  for (const [name, value] of Array.from(headers)) {
    metadata[name.replace(/-/g, "_")] = value
  }
  return metadata
}
const WORKER_ID = makeid(6)
// Proxy the incoming request to the origin, then ship a structured log
// event (request/response headers, origin timing, worker ID) to the
// Logflare ingest endpoint without delaying the client response.
// Returns the origin response unchanged.
async function handleRequest(event) {
  const { request } = event;

  // Logflare source + API key for this worker (template placeholders).
  const sourceKey = "8822e1d3-844e-4e66-9364-85ede7addabe"
  const apiKey = "your-super-secret-and-long-logflare-key-public"

  // Capture request details before forwarding to the origin.
  const method = request.method
  const url = request.url
  const userAgent = request.headers.get("user-agent")
  const host = request.headers.get("host")
  const ray = request.headers.get("cf-ray")
  const clientIp = request.headers.get("cf-connecting-ip")
  const cf = request.cf
  const requestMetadata = buildMetadataFromHeaders(request.headers)

  // Time the origin round-trip so origin_time can be reported.
  const startedAt = Date.now()
  const response = await fetch(request)
  const originTimeMs = Date.now() - startedAt

  const responseMetadata = buildMetadataFromHeaders(response.headers)
  const logEntry = `${method} | ${response.status} | ${clientIp} | ${ray} | ${url} | ${userAgent}`

  const logflareEventBody = {
    source: sourceKey,
    log_entry: logEntry,
    metadata: {
      response: {
        headers: responseMetadata,
        origin_time: originTimeMs,
        status_code: response.status,
      },
      request: {
        url,
        method,
        headers: requestMetadata,
        cf,
      },
      logflare_worker: {
        worker_id: WORKER_ID,
      },
    },
  }

  // Fire-and-forget: waitUntil keeps the worker alive until the log
  // POST settles, but the client gets the response immediately.
  event.waitUntil(fetch("http://localhost:4000/logs", {
    method: "POST",
    headers: {
      "X-API-KEY": apiKey,
      "Content-Type": "application/json",
      "User-Agent": `Cloudflare Worker via ${host}`
    },
    body: JSON.stringify(logflareEventBody),
  }))

  return response
}
// Register the worker's fetch handler. passThroughOnException() makes the
// request fall through directly to the origin if the handler throws, so a
// logging failure never takes down the site.
addEventListener("fetch", event => {
  event.passThroughOnException()
  event.respondWith(handleRequest(event))
})