Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .changeset/nip-22-created-at-limits-coverage.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"nostream": patch
---

Improve NIP-22 `created_at` limit handling coverage and boundary reliability.

This adds integration coverage for accepted and rejected events across configured positive and negative `created_at` deltas, and keeps rejection semantics consistent (`rejected`) for out-of-range timestamps.
29 changes: 29 additions & 0 deletions test/integration/features/nip-22/nip-22.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
@nip-22
Feature: NIP-22 created_at timestamp limits
Scenario: Event with created_at at current time is accepted
Given someone called Alice
And created_at limits are set to maxPositiveDelta 900 and maxNegativeDelta 0
When Alice drafts a text_note event with content "test event" and created_at 0 seconds from now
Then Alice sends their last draft event successfully
When Alice subscribes to author Alice
Then Alice receives a text_note event from Alice with content "test event"

Scenario: Event with created_at above positive delta limit is rejected
Given someone called Alice
And created_at limits are set to maxPositiveDelta 900 and maxNegativeDelta 0
When Alice drafts a text_note event with content "test event" and created_at 910 seconds from now
Then Alice sends their last draft event unsuccessfully with reason containing "rejected"

Scenario: Event older than configured negative delta limit is rejected
Given someone called Alice
And created_at limits are set to maxPositiveDelta 900 and maxNegativeDelta 3600
When Alice drafts a text_note event with content "test event" and created_at -3601 seconds from now
Then Alice sends their last draft event unsuccessfully with reason containing "rejected"

Scenario: Event within configured negative delta limit is accepted
Given someone called Alice
And created_at limits are set to maxPositiveDelta 900 and maxNegativeDelta 3600
When Alice drafts a text_note event with content "test event" and created_at -3590 seconds from now
Then Alice sends their last draft event successfully
When Alice subscribes to author Alice
Then Alice receives a text_note event from Alice with content "test event"
104 changes: 104 additions & 0 deletions test/integration/features/nip-22/nip-22.feature.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import { After, Before, Given, Then, When } from '@cucumber/cucumber'
import { assocPath, pipe } from 'ramda'

import { CommandResult, MessageType } from '../../../../src/@types/messages'
import { createEvent, sendEvent } from '../helpers'

import { Event } from '../../../../src/@types/event'
import { expect } from 'chai'
import { isDraft } from '../shared'
import { SettingsStatic } from '../../../../src/utils/settings'
import WebSocket from 'ws'

const previousSettingsSnapshot = Symbol('nip22PreviousSettingsSnapshot')
const draftOffsetSeconds = Symbol('nip22DraftOffsetSeconds')

// Overrides the relay's `created_at` limits on the shared settings singleton so
// scenarios can exercise acceptance/rejection boundaries. Both writes are
// immutable updates (assocPath returns fresh objects), so any previously
// snapshotted settings object is never mutated.
const setCreatedAtLimits = (maxPositiveDelta: number, maxNegativeDelta: number) => {
  const baseSettings = SettingsStatic._settings ?? SettingsStatic.createSettings()

  const withPositiveDelta = assocPath(
    ['limits', 'event', 'createdAt', 'maxPositiveDelta'],
    maxPositiveDelta,
    baseSettings,
  )

  SettingsStatic._settings = assocPath(
    ['limits', 'event', 'createdAt', 'maxNegativeDelta'],
    maxNegativeDelta,
    withPositiveDelta,
  ) as any
}

// Snapshot the live settings before any @nip-22 scenario mutates them, and
// restore that exact snapshot afterwards so limit overrides cannot leak into
// later scenarios or features.
Before({ tags: '@nip-22' }, function(this: any) {
  this[previousSettingsSnapshot] = SettingsStatic._settings
})

After({ tags: '@nip-22' }, function(this: any) {
  const snapshot = this[previousSettingsSnapshot]
  delete this[previousSettingsSnapshot]
  SettingsStatic._settings = snapshot
})
Comment on lines +16 to +32
Copy link

Copilot AI Apr 20, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The @nip-22 After hook resets created_at limits to hard-coded defaults (900/0). Integration tests globally override limits.event.createdAt.maxPositiveDelta to 0 in test/integration/features/shared.ts to avoid time-based flakiness; restoring to 900 here will leak settings changes into subsequent scenarios/features and can cause flaky failures. Snapshot the previous settings (or previous createdAt limits) before modifying, and restore that snapshot in After instead of hard-coding values.

Copilot uses AI. Check for mistakes.

// Maps the Gherkin limit-configuration step onto the settings override.
// Captured regex groups arrive as strings; convert explicitly before applying.
Given(/^created_at limits are set to maxPositiveDelta (\d+) and maxNegativeDelta (\d+)$/, function(
  rawMaxPositiveDelta: string,
  rawMaxNegativeDelta: string,
) {
  setCreatedAtLimits(parseInt(rawMaxPositiveDelta, 10), parseInt(rawMaxNegativeDelta, 10))
})

When(/^(\w+) drafts a text_note event with content "([^"]+)" and created_at (-?\d+) seconds from now$/, async function(
name: string,
content: string,
offsetSeconds: string,
) {
const { pubkey, privkey } = this.parameters.identities[name]
const createdAt = Math.floor(Date.now() / 1000) + Number(offsetSeconds)

const event: Event = await createEvent(
{
pubkey,
kind: 1,
content,
created_at: createdAt,
Comment thread
cameri marked this conversation as resolved.
},
privkey,
)

const draftEvent = event as any
draftEvent[isDraft] = true
draftEvent[draftOffsetSeconds] = Number(offsetSeconds)

this.parameters.events[name].push(event)
})

Then(/^(\w+) sends their last draft event unsuccessfully with reason containing "([^"]+)"$/, async function(
name: string,
expectedReason: string,
) {
const ws = this.parameters.clients[name] as WebSocket

const event = this.parameters.events[name].findLast((lastEvent: Event) => (lastEvent as any)[isDraft])
if (!event) {
throw new Error(`No draft event found for ${name}`)
}

const draftEvent = event as any
const offsetSeconds = draftEvent[draftOffsetSeconds]

let eventToSend = event
if (typeof offsetSeconds === 'number') {
const { pubkey, privkey } = this.parameters.identities[name]
const createdAt = Math.floor(Date.now() / 1000) + offsetSeconds

eventToSend = await createEvent(
{
pubkey,
kind: event.kind,
content: event.content,
created_at: createdAt,
},
privkey,
)
}

delete draftEvent[isDraft]
delete draftEvent[draftOffsetSeconds]

const command = await sendEvent(ws, eventToSend, false) as CommandResult

expect(command[0]).to.equal(MessageType.OK)
expect(command[2]).to.equal(false)
expect(command[3].toLowerCase()).to.contain(expectedReason.toLowerCase())
})
148 changes: 143 additions & 5 deletions test/unit/handlers/event-message-handler.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ import { identifyEvent, signEvent } from '../../../src/utils/event'
import { IncomingEventMessage, MessageType } from '../../../src/@types/messages'
import { CacheAdmissionState } from '../../../src/constants/caching'
import { Event } from '../../../src/@types/event'
import { EventKinds } from '../../../src/constants/base'
import { EventKinds, EventExpirationTimeMetadataKey, EventTags } from '../../../src/constants/base'
import { EventMessageHandler } from '../../../src/handlers/event-message-handler'
import { IUserRepository } from '../../../src/@types/repositories'
import { IWebSocketAdapter } from '../../../src/@types/adapters'
Expand Down Expand Up @@ -172,6 +172,23 @@ describe('EventMessageHandler', () => {
expect(strategyFactoryStub).not.to.have.been.called
})

it('rejects event if NIP-05 verification is required', async () => {
  // Every earlier gate passes so the NIP-05 check is the deciding factor.
  canAcceptEventStub.returns(undefined)
  isEventValidStub.resolves(undefined)
  isUserAdmitted.resolves(undefined)
  const reason = 'blocked: NIP-05 verification required'
  sandbox.stub(EventMessageHandler.prototype, 'checkNip05Verification' as any).resolves(reason)

  await handler.handleMessage(message)

  // The relay answers with a negative OK and never dispatches a strategy.
  expect(onMessageSpy).to.have.been.calledOnceWithExactly([MessageType.OK, event.id, false, reason])
  expect(strategyFactoryStub).not.to.have.been.called
})

it('rejects event if it is expired', async () => {
isEventValidStub.resolves(undefined)

Expand Down Expand Up @@ -280,6 +297,14 @@ describe('EventMessageHandler', () => {
})

describe('createdAt', () => {
it('returns undefined if event pubkey equals relay public key', () => {
  // Self-authored relay events bypass the created_at delta check, so even a
  // timestamp far beyond maxPositiveDelta is accepted.
  sandbox.stub(EventMessageHandler.prototype, 'getRelayPublicKey' as any).returns(event.pubkey)
  eventLimits.createdAt.maxPositiveDelta = 1
  event.created_at += 999

  const verdict = (handler as any).canAcceptEvent(event)

  expect(verdict).to.be.undefined
})

describe('maxPositiveDelta', () => {
it('returns undefined if maxPositiveDelta is zero', () => {
eventLimits.createdAt.maxPositiveDelta = 0
Expand All @@ -291,9 +316,9 @@ describe('EventMessageHandler', () => {
eventLimits.createdAt.maxPositiveDelta = 100
event.created_at += 101

expect((handler as any).canAcceptEvent(event)).to.equal(
'rejected: created_at is more than 100 seconds in the future',
)
expect(
(handler as any).canAcceptEvent(event)
).to.equal('rejected: created_at is more than 100 seconds in the future')
})
})

Expand Down Expand Up @@ -616,6 +641,22 @@ describe('EventMessageHandler', () => {
}
})

it('returns reason if request to vanish relay tag does not match relay URL', async () => {
  const privkey = '0000000000000000000000000000000000000000000000000000000000000001'
  // The relay tag points at a different relay than this one, so the vanish
  // request must be flagged as invalid here.
  const vanishEvent = await signEvent(privkey)(await identifyEvent({
    pubkey: '79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798',
    created_at: 1700000000,
    kind: EventKinds.REQUEST_TO_VANISH,
    tags: [[EventTags.Relay, 'wss://another-relay.example']],
    content: '',
  }))

  return expect((handler as any).isEventValid(vanishEvent)).to.eventually.equal(
    'invalid: request to vanish relay tag invalid',
  )
})

it('returns undefined if event is valid', () => {
return expect((handler as any).isEventValid(event)).to.eventually.be.undefined
})
Expand Down Expand Up @@ -683,6 +724,36 @@ describe('EventMessageHandler', () => {
})
})

describe('isBlockedByRequestToVanish', () => {
  beforeEach(() => {
    // Rebuild the handler with minimal stand-ins; only the websocket adapter
    // info and relay key lookup matter for these checks.
    const webSocketAdapterFactory = () => ({ info: { relay_url: 'relay_url' } }) as any
    const cacheStandIn = { hasKey: async () => false, setKey: async () => true } as any

    handler = new EventMessageHandler(
      {} as any,
      () => null,
      {} as any,
      userRepository,
      webSocketAdapterFactory,
      {} as any,
      cacheStandIn,
      () => ({ hit: async () => false }),
    )
  })

  it('returns undefined for request to vanish events', async () => {
    event.kind = EventKinds.REQUEST_TO_VANISH

    return expect((handler as any).isBlockedByRequestToVanish(event)).to.eventually.be.undefined
  })

  it("returns undefined if event pubkey equals relay's own public key", async () => {
    sandbox.stub(EventMessageHandler.prototype, 'getRelayPublicKey' as any).returns(event.pubkey)

    return expect((handler as any).isBlockedByRequestToVanish(event)).to.eventually.be.undefined
  })
})

describe('isRateLimited', () => {
let eventLimits: EventLimits
let settings: Settings
Expand Down Expand Up @@ -743,6 +814,21 @@ describe('EventMessageHandler', () => {
return expect((handler as any).isRateLimited(event)).to.eventually.be.false
})

it("fulfills with false if event pubkey equals relay's own public key", async () => {
sandbox.stub(EventMessageHandler.prototype, 'getRelayPublicKey' as any).returns(event.pubkey)
eventLimits.rateLimits = [
{
period: 60000,
rate: 1,
},
]

const actualResult = await (handler as any).isRateLimited(event)

expect(actualResult).to.be.false
expect(rateLimiterHitStub).not.to.have.been.called
})

it('skips rate limiter if IP is whitelisted', async () => {
eventLimits.rateLimits = [
{
Expand Down Expand Up @@ -1098,6 +1184,17 @@ describe('EventMessageHandler', () => {
})

describe('caching', () => {
it('falls back to repository lookup when cache read fails', async () => {
  // A broken cache must not block admission: the handler consults the
  // repository directly and then repopulates the cache with the verdict.
  cacheStub.getKey.rejects(new Error('cache unavailable'))
  settings.limits.event.pubkey.minBalance = 100n
  const admittedUser = { isAdmitted: true, balance: 150n }
  userRepositoryFindByPubkeyStub.resolves(admittedUser)

  await expect((handler as any).isUserAdmitted(event)).to.eventually.be.undefined

  expect(userRepositoryFindByPubkeyStub).to.have.been.calledOnceWithExactly(event.pubkey)
  expect(cacheStub.setKey).to.have.been.calledWith(`${event.pubkey}:is-admitted`, CacheAdmissionState.ADMITTED, 300)
})

it('fulfills with undefined and uses cache hit for admitted user without hitting DB', async () => {
cacheStub.getKey.resolves(CacheAdmissionState.ADMITTED)

Expand Down Expand Up @@ -1341,6 +1438,35 @@ describe('EventMessageHandler', () => {
})
})

describe('addExpirationMetadata', () => {
  beforeEach(() => {
    // Rebuild the handler with minimal stand-ins; only the websocket adapter
    // info is read by the code paths exercised below.
    const webSocketAdapterFactory = () => ({ info: { relay_url: 'relay_url' } }) as any

    handler = new EventMessageHandler(
      {} as any,
      () => null,
      {} as any,
      userRepository,
      webSocketAdapterFactory,
      {} as any,
      { hasKey: async () => false, setKey: async () => true } as any,
      () => ({ hit: async () => false }),
    )
  })

  it('adds expiration metadata when expiration tag is present', () => {
    // The expiration tag's string value surfaces as a numeric metadata
    // property on the enriched event.
    const expiringEvent: Event = {
      ...event,
      tags: [[EventTags.Expiration, '1665547000']],
    }

    const enriched = (handler as any).addExpirationMetadata(expiringEvent)

    expect((enriched as any)[EventExpirationTimeMetadataKey]).to.equal(1665547000)
  })
})

describe('processNip05Metadata', () => {
let settings: Settings
let nip05VerificationRepository: any
Expand Down Expand Up @@ -1422,6 +1548,18 @@ describe('EventMessageHandler', () => {
expect(verifyStub).not.to.have.been.called
})

it('ignores delete errors when kind-0 has no nip05 in content', async () => {
// A kind-0 (SET_METADATA) event without a nip05 field triggers cleanup of any
// stored verification; a failing delete must be swallowed, not surfaced.
nip05VerificationRepository.deleteByPubkey.rejects(new Error('db down'))
event.kind = EventKinds.SET_METADATA
event.content = JSON.stringify({ name: 'alice' })

// processNip05Metadata is fire-and-forget (not awaited); yield ~10ms so the
// background promise and its rejection handler can settle before asserting.
;(handler as any).processNip05Metadata(event)
await new Promise((resolve) => setTimeout(resolve, 10))

// Cleanup was attempted exactly once and no verification was started.
expect(nip05VerificationRepository.deleteByPubkey).to.have.been.calledOnceWithExactly(event.pubkey)
expect(verifyStub).not.to.have.been.called
})

it('does nothing when nip05 identifier is unparseable', async () => {
event.kind = EventKinds.SET_METADATA
event.content = JSON.stringify({ nip05: 'invalid-no-at-sign' })
Expand Down Expand Up @@ -1969,4 +2107,4 @@ describe('EventMessageHandler', () => {
expect(nip05VerificationRepository.upsert).to.have.been.calledOnce
})
})
})
})
Loading