-
1768401801069167
event body
{ "appname": "supabase-db", "event_message": null, "id": "abe485b7-501d-4ca2-9a6d-76f78e8bf17e", "project": "default", "timestamp": 1768401801069167 } -
1768401801069240 PostgreSQL Database directory appears to contain a database; Skipping initialization
event body
{ "appname": "supabase-db", "event_message": "PostgreSQL Database directory appears to contain a database; Skipping initialization", "id": "3a79ca5f-fc92-4a95-9002-7fa85b0941fd", "project": "default", "timestamp": 1768401801069240 } -
1768401801069250
event body
{ "appname": "supabase-db", "event_message": null, "id": "a0e9292a-e19a-4ff9-9990-495e7d19d79d", "project": "default", "timestamp": 1768401801069250 } -
1768401810495270 172.26.0.1 2026-01-14 14:43:30.493 UTC [109] netdata@postgres FATAL: password authentication failed for user "netdata"
event body
{ "appname": "supabase-db", "event_message": "172.26.0.1 2026-01-14 14:43:30.493 UTC [109] netdata@postgres FATAL: password authentication failed for user \"netdata\"", "id": "133aeaa2-a421-441b-a827-4226642b7340", "metadata": { "host": "db-default", "parsed": { "error_severity": "FATAL", "timestamp": "2026-01-14T14:43:30.495270685Z" } }, "project": "default", "timestamp": 1768401810495270 } -
1768401810495331 172.26.0.1 2026-01-14 14:43:30.493 UTC [109] netdata@postgres DETAIL: Role "netdata" does not exist.
event body
{ "appname": "supabase-db", "event_message": "172.26.0.1 2026-01-14 14:43:30.493 UTC [109] netdata@postgres DETAIL: Role \"netdata\" does not exist.", "id": "873f4b67-76eb-457a-812f-85d2d39c5795", "project": "default", "timestamp": 1768401810495331 } -
1768401810495334 Connection matched pg_hba.conf line 89: "host all all 172.16.0.0/12 scram-sha-256"
event body
{ "appname": "supabase-db", "event_message": "\tConnection matched pg_hba.conf line 89: \"host all all 172.16.0.0/12 scram-sha-256\"", "id": "fe89a848-2910-4300-a1bc-5b4c0cd17bb7", "project": "default", "timestamp": 1768401810495334 } -
1769795992520475 172.26.0.5 2026-01-30 17:59:52.518 UTC [120] supabase_admin@_supabase FATAL: terminating connection due to administrator command
event body
{ "appname": "supabase-db", "event_message": "172.26.0.5 2026-01-30 17:59:52.518 UTC [120] supabase_admin@_supabase FATAL: terminating connection due to administrator command", "id": "b635f6aa-78cf-40ce-83d5-034602e726ba", "metadata": { "host": "db-default", "parsed": { "error_severity": "FATAL", "timestamp": "2026-01-30T17:59:52.520475579Z" } }, "project": "default", "timestamp": 1769795992520475 } -
1769795992521682 172.26.0.5 2026-01-30 17:59:52.518 UTC [120] supabase_admin@_supabase STATEMENT: START_REPLICATION SLOT cainophile_ir4a8foe LOGICAL 0/0 (proto_version '1', publication_names '"logflare_pub"')
event body
{ "appname": "supabase-db", "event_message": "172.26.0.5 2026-01-30 17:59:52.518 UTC [120] supabase_admin@_supabase STATEMENT: START_REPLICATION SLOT cainophile_ir4a8foe LOGICAL 0/0 (proto_version '1', publication_names '\"logflare_pub\"')", "id": "09cfd81c-accb-4c80-86d9-baa15f0cbeef", "project": "default", "timestamp": 1769795992521682 }
Send Logs to this Source
Source ID
You'll need this source ID for some integrations or libraries.
c0cae9aa-2095-4ae1-b97d-9f351dd4f43f
If you're hosted on Vercel, set up our Vercel integration!
Install the Vercel integration
Gigalixir
Install the Gigalixir command line tool, and navigate to your project directory.
gigalixir drains:add "http://localhost:4000/logs/logplex?api_key=your-super-secret-and-long-logflare-key-public&source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f"
Cloudflare App
Already on Cloudflare? Install the Cloudflare app and start sending logs now.
Heroku
Add our log drain with a simple command.
heroku drains:add "http://localhost:4000/logs/logplex?api_key=your-super-secret-and-long-logflare-key-public&source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f"
Elixir Logger
Using Elixir? Use our Logger backend to send your structured logs.
Set up the Logger backend
Elixir Agent
Watch log files on a server with our Elixir agent.
Install the agent
Javascript
Use our official Pino transport to send log events from your Javascript project.
Set up the Pino transport
Github Webhook
Set your Github webhook to this Logflare endpoint and we'll ingest Github webhooks for you. This endpoint drops all keys ending in _url so it keeps your Github payloads in check.
http://localhost:4000/logs/github?api_key=your-super-secret-and-long-logflare-key-public&source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f
Github Action
Use our Github Action (thanks @gr2m) to easily log events coming from your repositories.
Set up the Github Action
Fluent Bit
Watch log files on a server with this Fluent Bit output config.
[INPUT]
Name tail
Path /var/log/syslog
[OUTPUT]
Name http
Match *
tls On
Host api.logflare.app
Port 443
URI /logs/json?api_key=your-super-secret-and-long-logflare-key-public&source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f
Format json
Retry_Limit 5
json_date_format iso8601
json_date_key timestamp
Generic Webhook
Use the generic JSON ingest endpoint to generate log events from an external webhook.
e.g. you can set a Github webhook to send events to:
http://localhost:4000/logs/json?api_key=your-super-secret-and-long-logflare-key-public&source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f
Or send generic JSON events yourself.
curl -X "POST" "http://localhost:4000/logs/json?source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f" \
-H 'Content-Type: application/json; charset=utf-8' \
-H 'X-API-KEY: your-super-secret-and-long-logflare-key-public' \
-d $'[
{
"yellow": true,
"tags": [
"popular, tropical, organic"
],
"store": {
"state": "AZ",
"city": "Phoenix",
"zip": 85016,
"address": "123 W Main St"
},
"type": "fruit",
"name": "banana",
"qty": 12
}
]'
Custom API Request
Send logs via an HTTP request. This request body payload lets you send over a human readable event message in
the message field.
curl -X "POST" "http://localhost:4000/logs?source=c0cae9aa-2095-4ae1-b97d-9f351dd4f43f" \
-H 'Content-Type: application/json' \
-H 'X-API-KEY: your-super-secret-and-long-logflare-key-public' \
-d $'{
"event_message": "This is another log message.",
"metadata": {
"ip_address": "100.100.100.100",
"request_method": "POST",
"custom_user_data": {
"vip": true,
"id": 38,
"login_count": 154,
"company": "Apple",
"address": {
"zip": "11111",
"st": "NY",
"street": "123 W Main St",
"city": "New York"
}
},
"datacenter": "aws",
"request_headers": {
"connection": "close",
"user_agent": "chrome"
}
}
}'
Custom Cloudflare Worker
Customize the Cloudflare worker using the template below.
// Build a random identifier of `length` characters.
// NOTE(review): the alphabet appears to omit the letter "O", presumably to
// avoid visual confusion with "0" — preserved as-is.
const makeid = length => {
  const alphabet = "ABCDEFGHIJKLMNPQRSTUVWXYZ0123456789"
  return Array.from(
    { length },
    () => alphabet[Math.floor(Math.random() * alphabet.length)]
  ).join("")
}
// Convert an iterable of [name, value] header pairs (e.g. a Headers object)
// into a plain object, replacing "-" with "_" in each name so the keys are
// safe to use as metadata field names. Later duplicates overwrite earlier ones.
const buildMetadataFromHeaders = headers =>
  Object.fromEntries(
    Array.from(headers, ([name, value]) => [name.replace(/-/g, "_"), value])
  )
// Random ID generated once per worker instance; attached to every log event
// so events can be correlated back to the instance that emitted them.
const WORKER_ID = makeid(6)
// Proxy the incoming request to origin, then ship a structured log event
// describing the request/response pair to Logflare. The log POST runs via
// event.waitUntil so it never delays the response to the client.
async function handleRequest(event) {
  const { request } = event;
  const sourceKey = "c0cae9aa-2095-4ae1-b97d-9f351dd4f43f"
  const apiKey = "your-super-secret-and-long-logflare-key-public"

  const method = request.method
  const url = request.url
  const userAgent = request.headers.get("user-agent")
  const host = request.headers.get("host")
  const ray = request.headers.get("cf-ray")
  const clientIp = request.headers.get("cf-connecting-ip")
  const requestMetadata = buildMetadataFromHeaders(request.headers)

  // Time the round trip to origin.
  const startedAt = Date.now()
  const response = await fetch(request)
  const originTimeMs = Date.now() - startedAt

  const logflareEventBody = {
    source: sourceKey,
    // Human-readable one-line summary shown as the event message.
    log_entry: `${method} | ${response.status} | ${clientIp} | ${ray} | ${url} | ${userAgent}`,
    metadata: {
      response: {
        headers: buildMetadataFromHeaders(response.headers),
        origin_time: originTimeMs,
        status_code: response.status,
      },
      request: {
        url: url,
        method: method,
        headers: requestMetadata,
        cf: request.cf,
      },
      logflare_worker: {
        worker_id: WORKER_ID,
      },
    },
  }

  const init = {
    method: "POST",
    headers: {
      "X-API-KEY": apiKey,
      "Content-Type": "application/json",
      "User-Agent": `Cloudflare Worker via ${host}`
    },
    body: JSON.stringify(logflareEventBody),
  }

  // Fire-and-forget: keep the worker alive until the log POST settles.
  event.waitUntil(fetch("http://localhost:4000/logs", init))
  return response
}
// Entry point: register the fetch handler for this worker.
// passThroughOnException makes Cloudflare fall back to the unproxied origin
// response if the handler throws, so a logging failure never breaks traffic.
addEventListener("fetch", event => {
  event.passThroughOnException()
  event.respondWith(handleRequest(event))
})