Custom Formats
loq supports adding custom log format parsers in two ways:
- Config files - Quick setup for personal/project use
- Code contribution - Add built-in parsers for everyone
Config File Setup
Config Locations
loq looks for config in these locations (in order):
- ./loq.config.ts — Project-local TypeScript config
- ./loq.config.js
- ./.loqrc
- ./.loqrc.json
- ~/.loqrc — User config
- ~/.loqrc.json
- ~/.config/loq/config.json
TypeScript Config
Best for complex parsing logic:
// loq.config.ts
export default {
formats: [
{
name: 'my-app',
detect: /^\[\d{4}-\d{2}-\d{2}/,
parse: {
pattern: /^\[(?<timestamp>[^\]]+)\] (?<level>\w+): (?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
},
},
},
{
name: 'nginx-json',
detect: (line) => {
try {
const obj = JSON.parse(line);
return 'request_uri' in obj;
} catch {
return false;
}
},
parse: (line) => {
const obj = JSON.parse(line);
return {
timestamp: obj.time_iso8601,
level: obj.status >= 400 ? 'error' : 'info',
message: `${obj.request_method} ${obj.request_uri}`,
fields: obj,
};
},
},
],
aliases: {
errors: 'where level=error',
slow: 'where response_time>1000',
},
};

JSON Config
Simpler option using regex patterns:
{
"formats": [
{
"name": "bracketed",
"detect": "^\\[\\d{4}",
"parse": {
"pattern": "^\\[([^\\]]+)\\] (\\w+): (.+)$",
"fields": {
"timestamp": 1,
"level": 2,
"message": 3
}
}
}
]
}

Format Definition
Required Fields
| Field | Type | Description |
|---|---|---|
| name | string | Unique identifier for the format |
| detect | RegExp \| string \| function | How to identify this log format |
| parse | object \| function | How to parse log lines |
Detection Methods
Regex pattern:
detect: /^\[\d{4}-\d{2}-\d{2}/

String (converted to regex):
"detect": "^\\[\\d{4}"

Function (most flexible):
detect: (line) => {
try {
const obj = JSON.parse(line);
return 'my_special_field' in obj;
} catch {
return false;
}
}

Parse Methods
Pattern-based:
parse: {
pattern: /^\[(?<timestamp>[^\]]+)\] (?<level>\w+): (?<message>.+)$/,
fields: {
timestamp: 'timestamp', // named group
level: 'level',
message: 'message',
},
}

Function-based:
parse: (line) => {
const parts = line.split(' | ');
return {
timestamp: parts[0],
level: parts[1],
message: parts[2],
fields: {
custom_field: parts[3],
},
};
}

LogEntry Structure
Your parser should return this structure:
interface LogEntry {
raw: string; // Original line (added automatically)
timestamp?: string; // ISO timestamp or parseable date
level?: string; // error, warn, info, debug, etc.
message?: string; // Main log message
fields?: Record<string, unknown>; // Additional fields
}

Using Custom Formats
Once configured, loq auto-detects your format:
# Auto-detect
loq app.log
# Force specific format
loq app.log --format my-app
# Query custom fields
loq app.log where custom_field=value

Command Aliases
Define shortcuts for common queries:
aliases: {
errors: 'where level=error',
slow: 'where response_time>1000',
today: 'where timestamp after today',
'5xx': 'where status>=500 and status<600',
}

Usage:
loq app.log errors # expands to: where level=error
loq access.log slow # expands to: where response_time>1000
loq app.log errors limit 10 # can combine with other options

Examples
Rails Log Format
{
name: 'rails',
detect: /^[DIWEF],\s*\[/,
parse: {
pattern: /^(?<level>[DIWEF]),\s*\[(?<timestamp>[^\]]+)\]\s*(?<pid>\d+)\s*(?<source>\w+)\s*--\s*:\s*(?<message>.*)$/,
fields: {
level: 'level',
timestamp: 'timestamp',
message: 'message',
pid: 'pid',
source: 'source',
},
},
}

Docker JSON Logs
{
name: 'docker-json',
detect: (line) => {
try {
const obj = JSON.parse(line);
return 'log' in obj && 'stream' in obj;
} catch {
return false;
}
},
parse: (line) => {
const obj = JSON.parse(line);
return {
timestamp: obj.time,
level: obj.stream === 'stderr' ? 'error' : 'info',
message: obj.log.trim(),
fields: obj,
};
},
}

Custom App with Metadata
{
name: 'my-service',
detect: /^\d{4}-\d{2}-\d{2}T.*\|/,
parse: (line) => {
const [timestamp, level, service, traceId, ...rest] = line.split(' | ');
return {
timestamp,
level: level.toLowerCase(),
message: rest.join(' | '),
fields: {
service,
traceId,
},
};
},
}

Contributing Built-in Parsers
Want to add a parser for everyone? Follow these steps:
1. Create Parser File
// src/parser/formats/myformat.ts
import type { LogEntry, LogParser } from '../types';
export const myFormatParser: LogParser = {
name: 'myformat',
detect(line: string): boolean {
return line.startsWith('MYFORMAT:');
},
parse(line: string): LogEntry | null {
const match = line.match(/^MYFORMAT: \[(.+?)\] (\w+) - (.+)$/);
if (!match) return null;
return {
raw: line,
timestamp: match[1],
level: match[2],
message: match[3],
fields: {},
};
},
};

2. Register Parser
// src/parser/auto-detect.ts
import { myFormatParser } from './formats/myformat';
const builtinParsers: LogParser[] = [
jsonParser,
apacheParser,
syslogParser,
clfParser,
myFormatParser, // Add here
];

3. Add Tests
// tests/parser/myformat.test.ts
import { describe, expect, test } from 'bun:test';
import { myFormatParser } from '../../src/parser/formats/myformat';
describe('myformat parser', () => {
test('detects myformat lines', () => {
expect(myFormatParser.detect('MYFORMAT: [2024-01-01] INFO - Hello')).toBe(true);
expect(myFormatParser.detect('Some other log')).toBe(false);
});
test('parses myformat correctly', () => {
const result = myFormatParser.parse('MYFORMAT: [2024-01-01] INFO - Hello world');
expect(result).toEqual({
raw: 'MYFORMAT: [2024-01-01] INFO - Hello world',
timestamp: '2024-01-01',
level: 'INFO',
message: 'Hello world',
fields: {},
});
});
test('returns null for invalid lines', () => {
expect(myFormatParser.parse('invalid line')).toBeNull();
});
});

4. Submit PR
- Fork the repo
- Create a branch:
git checkout -b add-myformat-parser
- Make your changes
- Run tests:
bun test
- Push and open a PR
Step-by-Step Example: Parsing Unstructured Logs
Let's walk through creating a parser for a common unstructured log format:
2025-01-15 08:00:35 ERROR [payment] Failed to process payment for order #12345
2025-01-15 08:00:40 DEBUG [scheduler] Running job: cleanup_sessions
2025-01-15 08:00:45 WARN [memory] Heap usage high: 1.5GB / 2GB (75%)

Step 1: Analyze the Format
Break down the log structure:
- 2025-01-15 08:00:35 — timestamp (YYYY-MM-DD HH:MM:SS)
- ERROR — log level (DEBUG, INFO, WARN, ERROR, FATAL)
- [payment] — component name in brackets
- Failed to process... — the message
Step 2: Write the Detection Pattern
The detection pattern should match uniquely to avoid false positives:
// Matches: date + time + level keyword
detect: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} (DEBUG|INFO|WARN|ERROR|FATAL)/

Step 3: Write the Parse Pattern
Use named capture groups for clarity:
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (?<level>\w+)\s+\[(?<component>[^\]]+)\] (?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
},
}

Step 4: Complete Config
// loq.config.ts
export default {
formats: [
{
name: 'myapp',
detect: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} (DEBUG|INFO|WARN|ERROR|FATAL)/,
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (?<level>\w+)\s+\[(?<component>[^\]]+)\] (?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
},
},
},
],
};

Step 5: Test It
# These queries work right away:
loq plain.log where level=error
loq plain.log count by level
# These need the component field added in Step 6 below:
loq plain.log where component=payment
loq plain.log where level=error and component=scheduler
loq plain.log count by component

Step 6: Add the Component Field
To query by component, add it to the fields:
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (?<level>\w+)\s+\[(?<component>[^\]]+)\] (?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
component: 'component', // Add this!
},
}

More Real-World Examples
Kubernetes Pod Logs
{
name: 'k8s',
detect: /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s/,
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+(?<level>\w+)\s+(?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
},
},
}

Python Logging
{
name: 'python',
detect: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} - \w+ -/,
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}) - (?<logger>\w+) - (?<level>\w+) - (?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
logger: 'logger',
},
},
}

Java/Log4j
{
name: 'log4j',
detect: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} \[/,
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?<thread>[^\]]+)\] (?<level>\w+)\s+(?<logger>\S+) - (?<message>.+)$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
thread: 'thread',
logger: 'logger',
},
},
}

Go/Zap Logger (Non-JSON)
{
name: 'zap',
detect: /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}[+-]\d{4}\s+(DEBUG|INFO|WARN|ERROR)/,
parse: {
pattern: /^(?<timestamp>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}[+-]\d{4})\s+(?<level>\w+)\s+(?<logger>\S+)\s+(?<message>[^\t]+)(?:\t(?<fields>.+))?$/,
fields: {
timestamp: 'timestamp',
level: 'level',
message: 'message',
logger: 'logger',
},
},
}

Heroku Router Logs
{
name: 'heroku',
detect: /^.*heroku\[router\]:/,
parse: (line) => {
const match = line.match(/^(?<timestamp>\S+) .*heroku\[router\]: at=(?<status>\w+) method=(?<method>\w+) path="(?<path>[^"]+)".*status=(?<code>\d+).*connect=(?<connect>\d+)ms service=(?<service>\d+)ms/);
if (!match) return null;
const { timestamp, status, method, path, code, connect, service } = match.groups!;
return {
timestamp,
level: status === 'error' ? 'error' : (parseInt(code) >= 400 ? 'warn' : 'info'),
message: `${method} ${path} ${code}`,
fields: {
method,
path,
status: parseInt(code),
connect_ms: parseInt(connect),
service_ms: parseInt(service),
},
};
},
}

Multiline Stack Traces
For logs with stack traces, you may need special handling:
{
name: 'java-multiline',
detect: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}/,
parse: (line) => {
// Skip continuation lines (stack traces)
if (line.startsWith('\t') || line.startsWith(' at ')) {
return null; // These get attached to previous entry
}
const match = line.match(/^(?<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) (?<level>\w+) (?<message>.+)$/);
if (!match) return null;
return {
timestamp: match.groups!.timestamp,
level: match.groups!.level.toLowerCase(),
message: match.groups!.message,
fields: {},
};
},
}

Debugging Custom Formats
Test your format detection:
# See which format is detected
loq app.log --format auto --verbose
# Force your format to test parsing
loq app.log --format my-custom-format

Check if fields are parsed correctly:
# Output as JSON to see all fields
loq app.log -o json limit 1