In the end, this is what I implemented (for demo purposes only - no related code is missing):
eventRoot.js:
import { combineReducers } from 'redux'
import ranges from './events'
import ids from './ids'
import params from './params'
import total from './total'

export default resource =>
    combineReducers({
        ids: ids(resource),
        ranges: ranges(resource),
        params: params(resource)
    })
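For context, here is a minimal sketch of how these per-resource factories might be mounted into the store. The events / list / data keys are my assumption, inferred from the state paths used in the selector further down, not code from the project:

// store.js - hypothetical wiring, assuming the state shape used by the selector below
import { createStore, combineReducers } from 'redux'
import list from './eventRoot'
import data from './data'

const rootReducer = combineReducers({
    events: combineReducers({
        list: list('events'), // ids / ranges / params for the 'events' resource
        data: data('events')  // cached event records keyed by id
    })
})

const store = createStore(rootReducer)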
events.js:
import { GET_EVENTS_SUCCESS } from '@/state/types/data'

export default resource => (previousState = {}, { type, payload, requestPayload, meta }) => {
    if (!meta || meta.resource !== resource) {
        return previousState
    }
    switch (type) {
        case GET_EVENTS_SUCCESS: {
            const newState = Object.assign({}, previousState)
            payload.data[resource].forEach(record => {
                // the rest of the original snippet was cut off; this is a reconstruction
                // based on the description below: keep an array of ISO 8601 interval
                // strings per record id and only append ranges that are not known yet
                const range = `${record.start}/${record.end}`
                if (!newState[record.id]) {
                    newState[record.id] = [range]
                } else if (newState[record.id].indexOf(range) === -1) {
                    newState[record.id] = [...newState[record.id], range]
                }
            })
            return newState
        }
        default:
            return previousState
    }
}
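For reference, here is a sketch of an action that would satisfy the checks in these reducers. The concrete values are made up; only the field names come from the code above and from the data reducer below:

// hypothetical GET_EVENTS_SUCCESS action, shaped to pass the reducer checks
const action = {
    type: 'GET_EVENTS_SUCCESS',
    payload: {
        data: {
            events: [
                { id: 101, title: 'Standup', start: '2019-04-01T09:00:00Z', end: '2019-04-01T09:15:00Z' }
            ]
        }
    },
    meta: {
        resource: 'events',
        fetchResponse: 'GET_EVENTS', // checked by the data reducer below
        fetchStatus: 'END'
    }
}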
There is also a data reducer, but it lives under the parent reducer because it is a generic implementation reused for general list responses. The event data is updated, and the start / end properties are removed because they are combined into a range (an ISO 8601 time interval). That range can later be consumed by moment.range or split on '/' to recover the start / end values. I chose an array of range strings to make it easy to check for existing ranges as they accumulate; I expect a primitive string comparison (indexOf, or ES6 includes) to be faster in such cases than looping over a more complex structure.
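As a quick illustration of that choice (a sketch, not code from the project), composing and reading such an interval string looks like this:

import Moment from 'moment'
import { extendMoment } from 'moment-range'

const moment = extendMoment(Moment)

// compose an ISO 8601 interval from a record's start / end
const toInterval = record => `${record.start}/${record.end}`

// cheap duplicate check on the stored array of interval strings
const hasInterval = (ranges, interval) => ranges.indexOf(interval) !== -1

// read it back, either by splitting...
const [start, end] = '2019-04-01T09:00:00Z/2019-04-01T10:00:00Z'.split('/')

// ...or by letting moment-range parse the interval directly
const range = moment.range('2019-04-01T09:00:00Z/2019-04-01T10:00:00Z')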
data.js (stripped down version):
import { END } from '@/state/types/fetch'
import { GET_EVENTS } from '@/state/types/data'

const cacheDuration = 10 * 60 * 1000 // ten minutes

const addRecords = (newRecords = [], oldRecords, isEvent) => {
    // prepare new records and timestamp them
    const newRecordsById = newRecords.reduce((prev, record) => {
        if (isEvent) {
            const { start, end, ...rest } = record
            prev[record.id] = rest
        } else {
            prev[record.id] = record
        }
        return prev
    }, {})
    const now = new Date()
    const newRecordsFetchedAt = newRecords.reduce((prev, record) => {
        prev[record.id] = now
        return prev
    }, {})
    // remove outdated old records
    const latestValidDate = new Date()
    latestValidDate.setTime(latestValidDate.getTime() - cacheDuration)
    const oldValidRecordIds = oldRecords.fetchedAt
        ? Object.keys(oldRecords.fetchedAt).filter(id => oldRecords.fetchedAt[id] > latestValidDate)
        : []
    const oldValidRecords = oldValidRecordIds.reduce((prev, id) => {
        prev[id] = oldRecords[id]
        return prev
    }, {})
    const oldValidRecordsFetchedAt = oldValidRecordIds.reduce((prev, id) => {
        prev[id] = oldRecords.fetchedAt[id]
        return prev
    }, {})
    // combine old records and new records
    const records = { ...oldValidRecords, ...newRecordsById }
    Object.defineProperty(records, 'fetchedAt', {
        value: { ...oldValidRecordsFetchedAt, ...newRecordsFetchedAt }
    }) // non enumerable by default
    return records
}

const initialState = {}
Object.defineProperty(initialState, 'fetchedAt', { value: {} }) // non enumerable by default

export default resource => (previousState = initialState, { payload, meta }) => {
    if (!meta || meta.resource !== resource) {
        return previousState
    }
    if (!meta.fetchResponse || meta.fetchStatus !== END) {
        return previousState
    }
    switch (meta.fetchResponse) {
        case GET_EVENTS:
            return addRecords(payload.data[resource], previousState, true)
        default:
            return previousState
    }
}
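One detail worth pointing out: because fetchedAt is defined with Object.defineProperty and no enumerable flag, it stays out of normal enumeration, so iterating or serializing the record map only yields actual records. A small sketch:

const records = { 1: { id: 1, title: 'Standup' } }
Object.defineProperty(records, 'fetchedAt', { value: { 1: new Date() } })

console.log(Object.keys(records))    // ['1'] - fetchedAt does not show up
console.log(JSON.stringify(records)) // '{"1":{"id":1,"title":"Standup"}}'
console.log(records.fetchedAt)       // still accessible for the cache check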
This can be used by a calendar component with an event selector:
import moment from 'moment-timezone'

const convertDateTimeToDate = (datetime, timeZoneName) => {
    const m = moment.tz(datetime, timeZoneName)
    return new Date(m.year(), m.month(), m.date(), m.hour(), m.minute(), 0)
}

const compileEvents = (state, filter) => {
    const eventsRanges = state.events.list.ranges
    const events = []
    state.events.list.ids.forEach(id => {
        if (eventsRanges[id]) {
            eventsRanges[id].forEach(range => {
                const [start, end] = range.split('/').map(d => convertDateTimeToDate(d))
                // the original snippet was cut off here; presumably the cached record
                // data is merged with the computed start / end and collected
                events.push({ ...state.events.data[id], start, end })
            })
        }
    })
    return events
}
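A hedged example of wiring the selector into a component (react-redux connect is assumed here, and the calendar component is just a placeholder):

import { connect } from 'react-redux'
import Calendar from './Calendar' // placeholder calendar component

const mapStateToProps = state => ({
    // compileEvents returns plain { start, end, ... } objects ready for rendering
    events: compileEvents(state)
})

export default connect(mapStateToProps)(Calendar)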
And this is what the data structure looks like in the Redux DevTools:

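Roughly, the shape produced by the reducers above is the following (an illustrative sketch only, the ids and values are made up):

// illustrative state shape - not actual DevTools output
const exampleState = {
    events: {
        list: {
            ids: [101, 102],
            ranges: {
                101: ['2019-04-01T09:00:00Z/2019-04-01T10:00:00Z'],
                102: ['2019-04-02T13:00:00Z/2019-04-02T14:30:00Z']
            },
            params: {} // current request params
        },
        data: {
            101: { id: 101, title: 'Standup' },
            102: { id: 102, title: 'Retro' }
            // fetchedAt is attached but non-enumerable, so it does not show up here
        }
    }
}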
Each time events are fetched, their data is updated (if anything changed) and any new ranges are appended. Here is a screenshot of the Redux diff after selecting a new range of events:

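The kind of change this produces looks roughly like this (illustrative values, a sketch rather than the actual DevTools diff):

// before: one known interval for event 101
const before = { 101: ['2019-04-01T09:00:00Z/2019-04-01T10:00:00Z'] }

// after fetching a period in which event 101 occurs again
const after = {
    101: [
        '2019-04-01T09:00:00Z/2019-04-01T10:00:00Z',
        '2019-04-08T09:00:00Z/2019-04-08T10:00:00Z' // newly appended interval
    ]
}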
Hope this helps someone. I'll just add that this is not battle-tested, but more of a working proof of concept.
[EDIT] I will probably move part of this logic to the backend, since then there will be no need to split / join / delete properties.