Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion bin/config.js
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,8 @@ module.exports = {
caching: {
service: loadFactory(config.get('CACHING_PROVIDER') || 'memory', 'caching'),
caching_redis_service: config.get('CACHING_REDIS_SERVICE'),
caching_redis_api_key: config.get('CACHING_REDIS_API_KEY')
caching_redis_api_key: config.get('CACHING_REDIS_API_KEY'),
caching_redis_port: config.get('CACHING_REDIS_PORT') || 6380
},
endpoints: {
service: config.get('SERVICE_ENDPOINT') || 'http://localhost:4000',
Expand Down
29 changes: 26 additions & 3 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,8 @@
"sinon": "^21.0.0",
"supertest": "^7.1.3",
"testcontainers": "^11.2.1",
"typescript": "5.8.3"
"typescript": "5.8.3",
"pako": "^2.1.0",
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

duplicate? pako is already listed in dependencies.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It looks like it's pako v1 and v2 for backwards compat.

"pako-1": "npm:pako@^1.0.8"
Comment on lines +127 to +128
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: sort alphabetically.

}
}
18 changes: 17 additions & 1 deletion providers/caching/redis.js
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,23 @@ class RedisCache {

let result
try {
const buffer = Buffer.from(typeof cacheItem === 'string' ? cacheItem : cacheItem.toString(), 'base64')
// Ensure cacheItem is treated as a string
const dataString = typeof cacheItem === 'string' ? cacheItem : String(cacheItem)

// Detect format: base64 (new) vs binary string (old)
// Base64 only contains A-Z, a-z, 0-9, +, /, and optional = padding
const isBase64 = /^[A-Za-z0-9+/]+=*$/.test(dataString)
Copy link
Collaborator

@qtomlinson qtomlinson Jan 23, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Most of our cached values are objects and this works well for those cases. However, when a number is cached (e.g. statusService.requestCount), the regex matches its digits and isBase64 yields a false positive. The number of such cases is small.


let buffer
if (isBase64) {
// NEW format: base64 encoded (written by Pako 2.1.0)
buffer = Buffer.from(dataString, 'base64')
} else {
// OLD format: binary string (written by Pako 1.0.8 with { to: 'string' })
// Use 'binary' encoding to preserve byte values
buffer = Buffer.from(dataString, 'binary')
}

result = pako.inflate(buffer, { to: 'string' })
} catch (err) {
// Disregard decompression errors gracefully as cache may be stored in an older format, missing or expired.
Expand Down
2 changes: 1 addition & 1 deletion providers/caching/redisConfig.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ function serviceFactory(options) {
const realOptions = options || {
service: config.get('CACHING_REDIS_SERVICE'),
apiKey: config.get('CACHING_REDIS_API_KEY'),
port: Number(config.get('CACHING_REDIS_PORT')) || 6380
port: Number(config.get('CACHING_REDIS_PORT'))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Was the removal of || 6380 here intentional? redis.js has it in the function signature default, and config.js has added the fallback.

}
return redis(realOptions)
}
Expand Down
191 changes: 191 additions & 0 deletions test/providers/caching/redis.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ const assert = require('assert')
const redisCache = require('../../../providers/caching/redis')
const { RedisCache } = require('../../../providers/caching/redis')
const { GenericContainer } = require('testcontainers')
const pako1 = require('pako-1')
const pako2 = require('pako')

const logger = {
info: () => {},
Expand Down Expand Up @@ -105,6 +107,195 @@ describe('Redis Cache', () => {
})
})

// Regression suite for the pako 1.x -> 2.x cache-format migration:
// old entries were written as raw zlib binary strings (pako 1.x with
// { to: 'string' }); new entries are base64-encoded deflate output.
// Redis is replaced by a stubbed client backed by a plain in-memory object.
describe('backward compatibility (pako 1.x -> pako 2.x)', () => {
// Prefix the cache layer prepends to JSON-serialized objects before compression.
const objectPrefix = '*!~%'
let mockClient, cache
// In-memory stand-in for the Redis key/value store.
const store = {}

beforeEach(function () {
// Minimal fake of the Redis client surface the cache provider uses.
mockClient = {
get: async key => Promise.resolve(store[key]),
set: async (key, value) => {
store[key] = value
},
// NOTE(review): del assigns null instead of removing the key — adequate
// here because get() then resolves to a falsy value for that key.
del: async key => {
store[key] = null
},
connect: async () => Promise.resolve(mockClient),
on: () => {},
quit: sinon.stub().resolves()
}
// Route client construction through the fake so no real connection is opened.
sandbox.stub(RedisCache, 'buildRedisClient').returns(mockClient)
cache = redisCache({ logger })
})

afterEach(function () {
sandbox.restore()
// Clear store
Object.keys(store).forEach(key => delete store[key])
})

// Verifies the regex the provider uses to distinguish the two storage formats.
describe('Format Detection', () => {
it('should detect old binary string format correctly', () => {
// Raw zlib output as produced by pako 1.x ({ to: 'string' }) — contains
// bytes outside the base64 alphabet, so the regex must reject it.
const oldData = 'xÚ+JMÉ,V°ª5´³0²ä\u0002\u0000\u0011î\u0003ê'
const isBase64 = /^[A-Za-z0-9+/]+=*$/.test(oldData)
assert.strictEqual(isBase64, false)
})

it('should detect new base64 format correctly', () => {
// Base64-encoded deflate output as written by the new code path.
const newData = 'eJwrSszLLEnVUUpKLAIAESID6g=='
const isBase64 = /^[A-Za-z0-9+/]+=*$/.test(newData)
assert.strictEqual(isBase64, true)
})
})

// Round-trip: data written by the OLD (pako 1.x) writer must still be
// readable by the NEW reader with format detection.
describe('Reading OLD format data (pako 1.x binary string)', () => {
it('should read definition data (def_* key pattern)', async () => {
await cache.initialize()

// Representative ClearlyDefined-style definition document.
const originalValue = {
coordinates: {
type: 'nuget',
provider: 'nuget',
namespace: null,
name: 'xunit.core',
revision: '2.1.0'
},
described: {
releaseDate: '2015-11-08T00:00:00.000Z',
urls: {
registry: 'https://www.nuget.org/packages/xunit.core/2.1.0',
download: 'https://www.nuget.org/api/v2/package/xunit.core/2.1.0'
}
},
licensed: {
declared: 'Apache-2.0 OR MIT'
},
files: 87,
_meta: {
schemaVersion: '1.6.1',
updated: '2015-11-08T12:00:00.000Z'
}
}

const serialized = objectPrefix + JSON.stringify(originalValue)

// compress with pako v1.x using binary string format (old format)
const oldFormatData = pako1.deflate(serialized, { to: 'string' })

// verify it's NOT base64 (binary string format)
assert.strictEqual(/^[A-Za-z0-9+/]+=*$/.test(oldFormatData), false)

// Seed the fake store directly, bypassing cache.set, to simulate a
// pre-existing entry written by the old code.
store['def_nuget/nuget/-/xunit.core/2.1.0'] = oldFormatData

// read with NEW code (uses pako v2.x with format detection)
const result = await cache.get('def_nuget/nuget/-/xunit.core/2.1.0')

assert.deepStrictEqual(result, originalValue)
assert.strictEqual(result.coordinates.name, 'xunit.core')
assert.strictEqual(result.licensed.declared, 'Apache-2.0 OR MIT')
})

it('should read harvest data (hrv_* key pattern)', async () => {
await cache.initialize()

const originalValue = [
{
type: 'component',
url: 'cd:/pypi/pypi/-/backports.ssl_match_hostname/3.7.0.2'
}
]

const serialized = objectPrefix + JSON.stringify(originalValue)

// compress with pako v1.x using binary string format (old format)
const oldFormatData = pako1.deflate(serialized, { to: 'string' })

store['hrv_pypi/pypi/-/backports.ssl_match_hostname/3.7.0.2'] = oldFormatData

// read with NEW code (uses pako v2.x with format detection)
const result = await cache.get('hrv_pypi/pypi/-/backports.ssl_match_hostname/3.7.0.2')

assert.deepStrictEqual(result, originalValue)
assert.strictEqual(result.length, 1)
assert.strictEqual(result[0].type, 'component')
assert.strictEqual(result[0].url, 'cd:/pypi/pypi/-/backports.ssl_match_hostname/3.7.0.2')
})
})

// Round-trip: data written in the NEW base64 format must be read back intact.
describe('Reading NEW format data (pako 2.x base64)', () => {
it('should read definition data (def_* key pattern)', async () => {
await cache.initialize()

const originalValue = {
coordinates: {
type: 'npm',
provider: 'npmjs',
namespace: null,
name: 'lodash',
revision: '4.17.21'
},
described: {
releaseDate: '2021-02-20T00:00:00.000Z',
urls: {
registry: 'https://www.npmjs.com/package/lodash',
download: 'https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz'
}
},
licensed: {
declared: 'MIT'
},
files: 1054,
_meta: {
schemaVersion: '1.6.1',
updated: '2021-02-20T12:00:00.000Z'
}
}

const serialized = objectPrefix + JSON.stringify(originalValue)

// compress with pako v2.x using base64 format (new format)
// (pako 2.x deflate returns a Uint8Array, hence the explicit base64 step)
const deflated = pako2.deflate(serialized)
const newFormatData = Buffer.from(deflated).toString('base64')

// verify it IS base64
assert.strictEqual(/^[A-Za-z0-9+/]+=*$/.test(newFormatData), true)

store['def_npm/npmjs/-/lodash/4.17.21'] = newFormatData

// read with NEW code (uses pako v2.x with format detection)
const result = await cache.get('def_npm/npmjs/-/lodash/4.17.21')

assert.deepStrictEqual(result, originalValue)
assert.strictEqual(result.coordinates.name, 'lodash')
assert.strictEqual(result.licensed.declared, 'MIT')
})

it('should read harvest data (hrv_* key pattern)', async () => {
await cache.initialize()

const originalValue = [
{ type: 'component', url: 'cd:/npm/npmjs/-/express/4.18.0' },
{ type: 'component', url: 'cd:/npm/npmjs/-/axios/1.6.0' }
]

const serialized = objectPrefix + JSON.stringify(originalValue)

// compress with pako v2.x using base64 format (new format)
const deflated = pako2.deflate(serialized)
const newFormatData = Buffer.from(deflated).toString('base64')

// NOTE(review): the stored key ('my-package') intentionally differs from the
// URLs inside the payload; only the key round-trip matters here.
store['hrv_npm/npmjs/-/my-package/1.0.0'] = newFormatData

// read with NEW code (uses pako v2.x with format detection)
const result = await cache.get('hrv_npm/npmjs/-/my-package/1.0.0')

assert.deepStrictEqual(result, originalValue)
assert.strictEqual(result.length, 2)
assert.strictEqual(result[0].url, 'cd:/npm/npmjs/-/express/4.18.0')
})
})
})
xdescribe('Integration Test', () => {
let container, redisConfig

Expand Down