When working with branches on Oxide, a new DB instance is created along with your branch. What is the best practice for copying data over, or for initializing the database on a branch with some seed data?
The following script allowed us to copy data from one DB to another.
const { Database, Day } = require("journeyapps");
const fetch = require('node-fetch');

async function run() {
    // Source instance: the DB you are copying data from
    const fromDB = await Database.instance({ baseUrl: 'https://run-testing-us.journeyapps.com/api/v4/{instance_id}', token: '{token}' });
    const schema = JSON.parse(JSON.stringify(fromDB.schema));
    const MAX_ROWS = 1000;
    for (const modelName in schema.objects) {
        console.log(modelName);
        let data = await fromDB[modelName].all().toArray();
        console.log(data.length);
        // Split the objects into batches of at most MAX_ROWS operations each
        let operationSet = new Array(Math.ceil(data.length / MAX_ROWS)).fill(0).map(() => ({
            "operations": []
        }));
        for (const [i, object] of data.entries()) {
            // Preserve relationships by copying the belongs-to foreign key fields
            let foreignKeys = {};
            for (const foreignKey in schema.objects[modelName].belongsTo) {
                foreignKeys[foreignKey + '_id'] = object[foreignKey + '_id'];
            }
            operationSet[Math.floor(i / MAX_ROWS)].operations.push({
                "method": "post",
                "object": { ...{ type: modelName, id: object.id }, ...foreignKeys, ...JSON.parse(JSON.stringify(object)) }
            });
        }
        // Post each batch to the destination instance's Batch API endpoint
        // (substitute the destination instance_id and token here)
        for (const operations of operationSet) {
            await fetch('https://run-testing-us.journeyapps.com/api/v4/{instance_id}/batch.json', {
                method: 'post',
                body: JSON.stringify(operations),
                headers: {
                    'Content-Type': 'application/json',
                    'Authorization': 'Bearer {token}'
                },
            });
        }
    }
}
run();
Hi Eugene,
Do you have any suggestions for migrating models with attachments, e.g. helper or logo files that are stored as attachments in the data model and also need to be migrated?
Hi Jason,
You could try something like this:
const { Database, Day } = require("journeyapps");
const fetch = require('node-fetch');

async function run() {
    const fromDB = await Database.instance({ baseUrl: 'https://run-testing-us.journeyapps.com/api/v4/{instance_id}', token: '{token}' });
    const schema = JSON.parse(JSON.stringify(fromDB.schema));
    const MAX_ROWS = 1000;
    for (const modelName in schema.objects) {
        console.log(modelName);
        // Collect the names of all attachment attributes on this model
        let attachments = [];
        for (const param in schema.objects[modelName].attributes) {
            if (schema.objects[modelName].attributes[param].type == 'attachment') {
                attachments.push(param);
            }
        }
        let data = await fromDB[modelName].all().toArray();
        console.log(data.length);
        let operationSet = new Array(Math.ceil(data.length / MAX_ROWS)).fill(0).map(() => ({
            "operations": []
        }));
        for (const [i, object] of data.entries()) {
            let parseObject = JSON.parse(JSON.stringify(object));
            if (attachments.length > 0) {
                // Download each attachment as base64 and keep track of its original filename
                let attachmentPromises = [];
                let filenames = [];
                for (const field of attachments) {
                    if (object[field] != null) {
                        attachmentPromises.push(object[field].toBase64());
                        const filename = object[field].urls.original.split('/').pop().split('#')[0].split('?')[0];
                        console.log(filename);
                        filenames.push(filename);
                    } else {
                        attachmentPromises.push(null);
                        filenames.push(null);
                    }
                }
                await Promise.all(attachmentPromises).then(results => {
                    for (const [index, base64] of results.entries()) {
                        if (base64 != null) {
                            // Re-upload the attachment on the destination as base64 content plus filename
                            parseObject[attachments[index]] = { base64: base64, filename: filenames[index] };
                        }
                    }
                });
            }
            let foreignKeys = {};
            for (const foreignKey in schema.objects[modelName].belongsTo) {
                foreignKeys[foreignKey + '_id'] = object[foreignKey + '_id'];
            }
            operationSet[Math.floor(i / MAX_ROWS)].operations.push({
                "method": "post",
                "object": { ...{ type: modelName, id: object.id }, ...foreignKeys, ...parseObject }
            });
        }
        for (const operations of operationSet) {
            await fetch('https://run-testing-us.journeyapps.com/api/v4/{instance_id}/batch.json', {
                method: 'post',
                body: JSON.stringify(operations),
                headers: {
                    'Content-Type': 'application/json',
                    'Authorization': 'Bearer {token}'
                },
            });
        }
    }
}
run();
Awesome, thanks Eugene, I will give this a shot.
Important: Remember that if you loop through each model in the schema to copy the data from one DB to another, you may inadvertently kick off webhooks. My suggestion would be to only copy over the data that is required (e.g. Master data), or to explicitly skip models that have webhooks defined (e.g. report generation models).
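For example, you could restrict the copy with an explicit allowlist or skip list before building any operations. This is only a rough sketch; the model names below are hypothetical and should be replaced with models from your own data model:

const COPY_ONLY = ['master_item', 'category'];   // hypothetical: the Master data models you actually need
const SKIP_MODELS = ['report'];                  // hypothetical: models with webhooks defined, e.g. report generation

for (const modelName in schema.objects) {
    // Only copy models that are explicitly required, and skip anything known to trigger webhooks
    if (COPY_ONLY.length > 0 && !COPY_ONLY.includes(modelName)) continue;
    if (SKIP_MODELS.includes(modelName)) continue;
    // ... build and post the batch operations for modelName as in the scripts above
}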
If you want to run this task in CloudCode, it will look like this:
export async function run() {
    const fromDB = await CloudCode.createRemoteDB('https://run-testing-us.journeyapps.com/', 'instanceID', { token: 'token' });
    const schema = JSON.parse(JSON.stringify(fromDB.schema));
    const MAX_ROWS = 1000;
    for (const modelName in schema.objects) {
        console.log(modelName);
        let data = await fromDB[modelName].all().toArray();
        console.log(data.length);
        let operationSet = new Array(Math.ceil(data.length / MAX_ROWS)).fill(0).map(() => ({
            "operations": []
        }));
        for (const [i, object] of data.entries()) {
            let foreignKeys = {};
            for (const foreignKey in schema.objects[modelName].belongsTo) {
                foreignKeys[foreignKey + '_id'] = object[foreignKey + '_id'];
            }
            operationSet[Math.floor(i / MAX_ROWS)].operations.push({
                "method": "post",
                "object": { ...{ type: modelName, id: object.id }, ...foreignKeys, ...JSON.parse(JSON.stringify(object)) }
            });
        }
        for (const operations of operationSet) {
            await fetch('https://run-testing-us.journeyapps.com/api/v4/{instance_id}/batch.json', {
                method: 'post',
                body: JSON.stringify(operations),
                headers: {
                    'Content-Type': 'application/json',
                    'Authorization': 'Bearer {token}'
                },
            });
        }
    }
}
See the documentation on accessing other instances for more information on the CloudCode.createRemoteDB function. You can also replace the use of the backend API by defining a destinationDB using CloudCode.createRemoteDB and making use of the Batch API.
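As a rough sketch of that approach (this assumes the handle returned by CloudCode.createRemoteDB exposes the same Batch constructor and model create()/save() semantics as the standard DB object, and the instance IDs and tokens below are placeholders; check the Batch API documentation for the exact usage):

export async function run() {
    const fromDB = await CloudCode.createRemoteDB('https://run-testing-us.journeyapps.com/', 'sourceInstanceID', { token: 'source-token' });
    // Assumption: the destination is defined the same way, instead of calling the backend API with fetch
    const destinationDB = await CloudCode.createRemoteDB('https://run-testing-us.journeyapps.com/', 'destinationInstanceID', { token: 'destination-token' });
    const schema = JSON.parse(JSON.stringify(fromDB.schema));
    for (const modelName in schema.objects) {
        const data = await fromDB[modelName].all().toArray();
        // Assumption: the remote DB handle exposes the same Batch API as the standard DB object
        const batch = new destinationDB.Batch();
        for (const object of data) {
            // Assumption: create() accepts a plain attribute object; otherwise assign the fields individually
            const copy = destinationDB[modelName].create(JSON.parse(JSON.stringify(object)));
            batch.save(copy);
        }
        await batch.execute();
    }
}

Note that, unlike the batch.json endpoint used in the scripts above, this sketch does not attempt to preserve the original object IDs or foreign keys, so relationships would still need to be handled explicitly.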
Note that new deployments are no longer created by default when creating a new branch. You can create a new deployment, or link the new branch to an existing deployment. See more information here.
To copy data between backends, you can also use the Migrate JourneyDB Data feature of the JourneyScript Runner VS Code extension. This will also include attachments.