Goal: Recursively convert string dates into Unix Epoch timestamps.
-
This Function, dateToEpochConversion, recursively converts string dates into UNIX Epoch timestamps.
-
The sample can convert to either seconds since the Epoch or milliseconds since the Epoch; a short sketch of the underlying Date.parse() conversion follows this list.
-
In addition, the sample can optionally rename fields by adding an _ms or _sec suffix (note: elements inside arrays are not renamed, since they have no field names).
-
Requires Eventing Storage (or metadata collection) and a "source" collection.
-
Will operate on all documents where doc.type === "my_data_type".
-
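The heavy lifting is done by JavaScript's built-in Date.parse(). As a minimal sketch of the idea (the field name "ts" and the -07:00 local timezone behind the example values are assumptions for illustration only):

// Date.parse() returns milliseconds since the Unix Epoch, or NaN if the string cannot be parsed.
// Note that non-ISO strings such as "2009/06/29 13:30:10" are parsed as local time by most engines.
var ms  = Date.parse("2009/06/29 13:30:10"); // 1246307410000 when the local timezone is -07:00
var sec = Math.round(ms / 1000);             // 1246307410
// The Function below applies this recursively to every matching string field, so
// { "ts": "2009/06/29 13:30:10" } becomes { "ts_sec": 1246307410 }.
-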
dateToEpochConversion
-
Input Data/Mutation
-
Output Data/Mutation
// To run, configure the settings for this Function, dateToEpochConversion, as follows:
//
// Version 7.0+
//   "Listen to Location"
//     bulk.data.source
//   "Eventing Storage"
//     rr100.eventing.metadata
//   Binding(s)
//     1. "binding type", "alias name...", "bucket.scope.collection", "Access"
//        "bucket alias", "src_col",       "bulk.data.source",        "read and write"
//
// Version 6.X
//   "Source Bucket"
//     source
//   "MetaData Bucket"
//     metadata
//   Binding(s)
//     1. "binding type", "alias name...", "bucket", "Access"
//        "bucket alias", "src_col",       "source", "read and write"
function convertStrToEpoch(key, val, control) {
  // We want to convert string dates to an Epoch timestamp. First we use a regex
  // to filter; if that passes we use Date.parse(..) to convert the string into a
  // numeric timestamp; finally, if the matching control entry has "rename": true,
  // we add a suffix of "_ms" or "_sec" to the field name.
  // e.g. Date.parse("2009/06/29 13:30:10")          handles "yyyy/MM/dd HH:mm:ss"
  // e.g. Date.parse("2020-06-24 09:55:22 -07:00")   handles "yyyy-MM-dd HH:mm:ss Z"
  // The caller supplies a self-explanatory control array, for example:
  // var control = [
  //   { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+$/g, "millis": true,  "rename": true },
  //   { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d$/g,   "millis": false, "rename": true }
  // ];
  for (var i = 0; i < control.length; i++) {
    var regxres = val.match(control[i].regx);
    if (regxres !== null) {
      var dt = Date.parse(val);
      if (!isNaN(dt)) {
        var suffix = "";
        var epochMs = dt; // Date.parse() already returns milliseconds since the Epoch
        if (key !== "" && control[i].rename) {
          if (control[i].millis) {
            suffix = "_ms";
          } else {
            suffix = "_sec";
          }
        }
        if (control[i].millis) {
          val = epochMs;
        } else {
          val = Math.round(epochMs / 1000);
        }
        return { "key": key + suffix, "val": val, "epoch_ok": true };
      }
    }
  }
  return { "key": key, "val": val, "epoch_ok": false };
}
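// For illustration: with the four-pattern control array defined in OnUpdate below, and assuming a
// local timezone of -07:00 (as in the sample output), a call such as
//   convertStrToEpoch("ts", "2009/06/29 13:30:10", control)
// would return { "key": "ts_sec", "val": 1246307410, "epoch_ok": true }; a string that matches no
// pattern comes back unchanged with "epoch_ok": false. The field name "ts" is just an example.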
function convertTsStringsToEpochNumbers(obj, control) {
  // This performs a deep copy (deep clone) of obj, applying convertStrToEpoch where needed.
  // Process terminal strings
  if (obj && typeof obj === 'string') {
    var retval = convertStrToEpoch("", obj, control);
    if (retval.epoch_ok) obj = retval.val;
    return obj;
  }
  if (!obj || typeof obj !== 'object') {
    return obj;
  }
  // Process an Array or Object
  let newObj = {};
  if (Array.isArray(obj)) {
    // Array elements have no field names, so there is nothing to rename here
    newObj = obj.map(item => convertTsStringsToEpochNumbers(item, control));
  } else {
    Object.keys(obj).forEach((k) => {
      var retval = { "key": k, "val": obj[k] };
      if (obj[k] && typeof obj[k] === 'string') {
        retval = convertStrToEpoch(k, obj[k], control);
      }
      newObj[retval.key] = convertTsStringsToEpochNumbers(retval.val, control);
    });
  }
  return newObj;
}
function OnUpdate(doc, meta) {
  if (doc.type !== "my_data_type") return;
  var rename_with_ms_or_sec = true; // set false to keep the original field names
  // Apply four (4) different datetime search patterns across the entire document
  var control = [
    { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+$/g,            "millis": true,  "rename": rename_with_ms_or_sec },
    { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d$/g,              "millis": false, "rename": rename_with_ms_or_sec },
    { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+ .\d\d:\d\d$/g, "millis": false, "rename": rename_with_ms_or_sec },
    { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d .\d\d:\d\d$/g,   "millis": true,  "rename": rename_with_ms_or_sec }
  ];
  var newdoc = convertTsStringsToEpochNumbers(doc, control);
  // src_col is the bucket alias bound to our source collection; writing back to the
  // source collection is allowed because source bindings can be read/write since 6.5
  src_col[meta.id] = newdoc;
}
INPUT: KEY my_data_type::1001
{
  "type": "my_data_type",
  "id": 1001,
  "datet1a": "2009/06/29 13:30:10",
  "datet1b": "2009/06/29 13:30:10",
  "datet1c": "2009/06/29 13:30:10 -07:00",
  "datet1d": "2009/06/29 13:30:10 -06:00",
  "datet2a": "2009-06-29 13:30:10.333",
  "datet2b": "2009-06-29 13:30:10.333",
  "datet2c": "2009-06-29 13:30:10.333 -07:00",
  "datet2d": "2009-06-29 13:30:10.333 -06:00",
  "subdoc_same_dates": {
    "datet1a": "2009/06/29 13:30:10",
    "datet1b": "2009/06/29 13:30:10",
    "datet1c": "2009/06/29 13:30:10 -07:00",
    "datet1d": "2009/06/29 13:30:10 -06:00",
    "datet2a": "2009-06-29 13:30:10.333",
    "datet2b": "2009-06-29 13:30:10.333",
    "datet2c": "2009-06-29 13:30:10.333 -07:00",
    "datet2d": "2009-06-29 13:30:10.333 -06:00",
    "subsubdoc_two_dates": {
      "datet1a": "2009/06/29 13:30:10",
      "datet1b": "2009/06/29 13:30:10",
      "dary": [
        "2009/06/29 13:30:10",
        "2009-06-29 13:30:10.333",
        { "datet1a": "2009/06/29 13:30:10" }
      ]
    }
  }
}
UPDATED/OUTPUT: KEY my_data_type::1001
{
  "type": "my_data_type",
  "id": 1001,
  "datet1a_sec": 1246307410,
  "datet1b_sec": 1246307410,
  "datet1c_ms": 1246307410000,
  "datet1d_ms": 1246303810000,
  "datet2a_ms": 1246307410333,
  "datet2b_ms": 1246307410333,
  "datet2c_sec": 1246307410,
  "datet2d_sec": 1246303810,
  "subdoc_same_dates": {
    "datet1a_sec": 1246307410,
    "datet1b_sec": 1246307410,
    "datet1c_ms": 1246307410000,
    "datet1d_ms": 1246303810000,
    "datet2a_ms": 1246307410333,
    "datet2b_ms": 1246307410333,
    "datet2c_sec": 1246307410,
    "datet2d_sec": 1246303810,
    "subsubdoc_two_dates": {
      "datet1a_sec": 1246307410,
      "datet1b_sec": 1246307410,
      "dary": [
        1246307410,
        1246307410333,
        {
          "datet1a_sec": 1246307410
        }
      ]
    }
  }
}
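To experiment with the conversion logic outside of Eventing, the two helper functions above can be exercised in a stand-alone JavaScript runtime such as Node.js. The following is only a sketch for local testing: the test document, the two-pattern control array, and the console.log() call are assumptions, and the exact numeric results depend on your local timezone.

// Stand-alone test sketch: paste convertStrToEpoch() and convertTsStringsToEpochNumbers()
// from the Function above into a Node.js script, then run something like this.
var control = [
  { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+$/g, "millis": true,  "rename": true },
  { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d$/g,   "millis": false, "rename": true }
];
var testdoc = {
  "type": "my_data_type",
  "created": "2009/06/29 13:30:10",
  "nested": { "updated": "2009-06-29 13:30:10.333" },
  "dary": [ "2009/06/29 13:30:10" ]
};
var converted = convertTsStringsToEpochNumbers(testdoc, control);
// Inside Eventing you would use log(...); in Node.js use console.log(...)
console.log(JSON.stringify(converted, null, 2));
// Expected shape (exact epoch values depend on the local timezone):
// { "type": ..., "created_sec": ..., "nested": { "updated_ms": ... }, "dary": [ ... ] }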