Ever get asked to do one of those "one time" pushes of X records to one of your company's internal APIs? There's setting up all of the Postman calls, getting the auth sorted, and so on to contend with, and what looks like a small effort often comes with a lot of little time sinks attached to tax your day.
Here's a quick script you can start with to get through this chore more quickly.
For those of you in a hurry, here it is:
import axios from 'axios';
import fs from 'fs';
import path from 'path';
import {colorize} from 'json-colorizer';
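// Minimal console helpers: log the given text in plain, green or red, and return console so the calls can be chained.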
console.white = (args) => {
console.log(args);
return console;
}
console.green = (args) => {
console.log(`\x1b[32m${args}\x1b[0m`);
return console;
}
console.red = (args) => {
console.log(`\x1b[31m${args}\x1b[0m`);
return console;
}
const interpolateFeed = (feed) => {
// Then we go through the args and replace the {{placeholder}} values, if any, with the actual values.
for (const [key, value] of Object.entries(args)) {
feed = feed.replace(new RegExp(`{{${key}}}`, 'g'), value);
}
return JSON.parse(feed);
}
const mapFeedToRequests = (feed) => {
// First we get the $.config value from the feed.
const config = feed.config;
const keys = [
'url',
'headers',
'method',
'data',
'params'
]
// Then we iterate over the $.requests array and map each request to a new object.
// Checking first if the request has a value for the key, if not, we use the value from the config.
return feed.requests.map((request) => {
const newRequest = {};
for (const key of keys) {
newRequest[key] = request[key] || config[key];
}
// delete any undefined keys
Object.keys(newRequest).forEach((key) => newRequest[key] === undefined && delete newRequest[key]);
return newRequest;
});
}
const compileReport = (results) => {
// Split the results into successes and errors.
// Axios rejects for any non-2xx status, so a result counts as a success when it
// has no error property (i.e. the response came back in the 200 range).
const successes = [];
const errors = [];
for (const result of results) {
if (result.error) {
errors.push(result);
} else {
successes.push(result);
}
}
return {
successes,
errors
};
}
const processFeed = async (args) => {
const results = [];
try {
const rawFeed = fs.readFileSync(path.resolve(args.feed), 'utf8');
const interpolatedFeed = interpolateFeed(rawFeed);
const requests = mapFeedToRequests(interpolatedFeed);
for (let i = 0; i < requests.length; i++) {
const request = requests[i];
const axiosRequestConfig = {
url: request.url,
method: request.method ?? 'GET'
}
if (request.headers && Object.keys(request.headers).length > 0) {
axiosRequestConfig.headers = request.headers;
}
if (request.params && Object.keys(request.params).length > 0) {
axiosRequestConfig.params = request.params;
}
if (request.data) {
// if the data is not a string, we stringify it.
if (typeof request.data !== 'string') {
request.data = JSON.stringify(request.data);
}
axiosRequestConfig.data = request.data;
}
let response;
try {
response = await axios(axiosRequestConfig);
results.push({
request: request,
response: response.data
});
} catch (e) {
const feedEntry = interpolatedFeed.requests[i];
results.push({
request: feedEntry,
response: e.response?.data,
error: e.message
});
}
}
} catch (error) {
console.error('Error reading feed file:', error.message);
process.exit(1);
}
const report = compileReport(results);
console.white('RESULTS: ').green(`Successes: ${report.successes.length}`).red(`Errors: ${report.errors.length}`);
console.log(colorize(JSON.stringify(report, null, 2)));
console.white('RESULTS: ').green(`Successes: ${report.successes.length}`).red(`Errors: ${report.errors.length}`);
}
const rawArgs = process.argv.slice(2);
// e.g. ['-feed', 'tag-groups-insert.json', '-auth', '<your-auth-token>']
const args = {};
// We assume that the first arg is a flag and the following arg is its value.
// Using a for loop that increments by 2, we can grab each flag and its value.
for (let i = 0; i < rawArgs.length; i += 2) {
args[rawArgs[i].slice(1)] = rawArgs[i + 1];
}
// If the feed file is not provided, we exit the process.
if (!args.feed) {
console.error('Feed file not provided.');
// Illustrate example usage in purple text
console.log('\x1b[35m', 'Usage: node index.js -feed <path-to-feed-file>', '\x1b[0m');
process.exit(1);
}
processFeed(args);
It's intended to process a JSON feed file structured like this:
{
"config": {
"url": "https://{{env}}.api.endpoint/movies/",
"method": "POST",
"headers": {
"accept": "application/json",
"accept-language": "en-US,en;q=0.9",
"authorization": "Bearer {{authorization}}",
"x-api-version": "{{apiVersion}}"
}
},
"requests": [
{
"data": {
"title": "Frozen",
"genre": "Animation",
"releaseYear": 2013,
"status": "released",
"description": "A story of two sisters, Elsa and Anna, who embark on an adventure to save their kingdom."
}
},
{
"data": {
"title": "The Lion King",
"genre": "Animation",
"releaseYear": 1994,
"status": "released",
"description": "A young lion prince flees his kingdom only to learn the true meaning of responsibility and bravery."
}
},
{
"data": {
"title": "Beauty and the Beast",
"genre": "Animation",
"releaseYear": 1991,
"status": "released",
"description": "A young woman befriends a cursed prince who is trapped in the form of a beast."
}
},
{
"data": {
"title": "Moana",
"genre": "Animation",
"releaseYear": 2016,
"status": "released",
"description": "An adventurous teenager sails out on a daring mission to save her people."
}
},
{
"data": {
"title": "Aladdin",
"genre": "Animation",
"releaseYear": 1992,
"status": "released",
"description": "A kind-hearted street urchin vies for the love of a beautiful princess."
}
},
{
"data": {
"title": "Mulan",
"genre": "Animation",
"releaseYear": 1998,
"status": "released",
"description": "A young Chinese maiden disguises herself as a male warrior to save her father."
}
},
{
"data": {
"title": "Tangled",
"genre": "Animation",
"releaseYear": 2010,
"status": "released",
"description": "Rapunzel, a princess with magical long blonde hair, is held captive in a tower."
}
},
{
"data": {
"title": "Zootopia",
"genre": "Animation",
"releaseYear": 2016,
"status": "released",
"description": "In a city of anthropomorphic animals, a rookie bunny cop and a cynical con artist fox must work together."
}
}
]
}
The structure is intended to keep you from repeating yourself, favoring a source hierarchy:
- Feed Request - The most specific context; values set here win over shared values from the larger context of the Feed Config.
- Feed Config - Values that are identical across all requests; these are used whenever the same key is not already specified on the Feed Request.
- Arguments - Any flags you pass in will interpolate over the feed itself, config or individual requests.
Or, in other words, if you're using the same endpoint and authorization for all requests, you only need to declare them once! Either as an interpolated argument in each request or as a value in feed.config.
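For example, in a hypothetical feed like the sketch below, every request inherits the shared url, method and headers from config, while the second request overrides the method and url for that one call:
{
  "config": {
    "url": "https://{{env}}.api.endpoint/movies/",
    "method": "POST",
    "headers": { "authorization": "Bearer {{authorization}}" }
  },
  "requests": [
    { "data": { "title": "Frozen" } },
    { "method": "PUT", "url": "https://{{env}}.api.endpoint/movies/42", "data": { "status": "released" } }
  ]
}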
Interpolation Mechanism
The interpolation mechanism in our script is a crucial step that ensures our data placeholders are replaced with actual values before making any API calls. This process allows for dynamic insertion of values into our request payloads, making the script versatile and easily adaptable to different use cases. Let's dive into how the interpolation function works.
Interpolate Feed Function
The interpolateFeed function takes a raw feed as input, which is typically read from a JSON file. This feed may contain placeholders in the format {{placeholder}}. These placeholders need to be replaced with actual values, which are provided as arguments when running the script. Here's how the function accomplishes this:
const interpolateFeed = (feed) => {
// Then we go through the args and replace the {{placeholder}} values, if any, with the actual values.
for (const [key, value] of Object.entries(args)) {
feed = feed.replace(new RegExp(`{{${key}}}`, 'g'), value);
}
return JSON.parse(feed);
}
- Reading Arguments: The function iterates over each key-value pair in the args object, which contains the actual values to replace the placeholders.
- Replacing Placeholders: It uses a regular expression to find and replace all occurrences of each placeholder with its corresponding value.
- Parsing JSON: Finally, the function parses the modified string back into a JSON object for further processing.
Example Usage
Let's say our feed file contains the following JSON with placeholders:
{
"config": {
"url": "https://api.example.com/data",
"headers": {
"Authorization": "Bearer {{auth}}",
"Content-Type": "application/json"
},
"method": "POST"
},
"requests": [
{
"data": {
"id": 1,
"name": "{{name}}"
}
}
]
}
When running the script, we pass the arguments -auth and -name like so:
node script.js -feed feed.json -auth myAuthToken -name JohnDoe
The interpolateFeed function will replace {{auth}} with myAuthToken and {{name}} with JohnDoe, resulting in the following interpolated feed:
{
"config": {
"url": "https://api.example.com/data",
"headers": {
"Authorization": "Bearer myAuthToken",
"Content-Type": "application/json"
},
"method": "POST"
},
"requests": [
{
"data": {
"id": 1,
"name": "JohnDoe"
}
}
]
}
Mapping Requests to Axios Configuration
Once the feed is interpolated, we need to convert each request into a format that Axios can understand. The mapFeedToRequests function handles this transformation.
const mapFeedToRequests = (feed) => {
// First we get the $.config value from the feed.
const config = feed.config;
const keys = [
'url',
'headers',
'method',
'data',
'params'
]
// Then we iterate over the $.requests array and map each request to a new object.
// Checking first if the request has a value for the key, if not, we use the value from the config.
return feed.requests.map((request) => {
const newRequest = {};
for (const key of keys) {
newRequest[key] = request[key] || config[key];
}
// delete any undefined keys
Object.keys(newRequest).forEach((key) => newRequest[key] === undefined && delete newRequest[key]);
return newRequest;
});
}
- Config Values: The function starts by extracting the global configuration (config) from the feed.
- Default Values: It defines a list of keys (url, headers, method, data, params) that are important for making an Axios request.
- Request Mapping: It iterates over each request in the feed and assigns values from the request itself, falling back to the global configuration if the specific request key is missing.
- Cleanup: Any undefined keys in the resulting request object are deleted to ensure a clean and valid Axios request configuration.
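For instance, the single request in the interpolated example feed above comes out of mapFeedToRequests looking like this, with url, headers and method inherited from config and data taken from the request itself:
{
  "url": "https://api.example.com/data",
  "headers": {
    "Authorization": "Bearer myAuthToken",
    "Content-Type": "application/json"
  },
  "method": "POST",
  "data": {
    "id": 1,
    "name": "JohnDoe"
  }
}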
Reporting
Finally, we produce a beautifully styled, colored report (courtesy of json-colorizer) detailing the results of your submission.
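The report is simply the object compileReport returns: a successes array and an errors array, where each entry pairs the request that was sent with the response it received (and, for failures, the error message). With illustrative values and headers omitted for brevity, the colorized output has roughly this shape:
{
  "successes": [
    {
      "request": {
        "url": "https://api.example.com/data",
        "method": "POST",
        "data": "{\"id\":1,\"name\":\"JohnDoe\"}"
      },
      "response": { "id": 1, "name": "JohnDoe" }
    }
  ],
  "errors": []
}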
How You Might Extend This
Rate Limit Handling
When you're processing requests in bulk, it's very common to come up against rate limits. When this happens, we can incorporate a library like axios-retry.
import axios from 'axios';
import axiosRetry from 'axios-retry';
// Initialize axios instance
const axiosInstance = axios.create({
baseURL: 'https://api.example.com',
timeout: 1000,
});
// Apply axiosRetry to the axios instance
axiosRetry(axiosInstance, {
retries: 3, // Number of retry attempts
retryDelay: (retryCount) => {
console.log(`Retry attempt: ${retryCount}`);
return retryCount * 1000; // Time between retries in milliseconds
},
retryCondition: (error) => {
// Retry on network errors, 429 rate-limit responses, or 5xx status codes
return axiosRetry.isNetworkOrIdempotentRequestError(error) || error.response?.status === 429 || error.response?.status >= 500;
},
});
// Make a request using the axios instance
axiosInstance.get('/data')
.then(response => {
console.log('Data:', response.data);
})
.catch(error => {
console.error('Error:', error);
});
This would conceivably allow you to pass in arguments controlling the retry parameters, such as the number of times to retry or the time between attempts.
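Here's a minimal sketch of that idea, assuming you add hypothetical -retries and -retryDelay flags handled by the same flag parser used in the script above:
import axios from 'axios';
import axiosRetry from 'axios-retry';

// Hypothetical flags (not part of the original script): -retries and -retryDelay.
const retries = Number(args.retries ?? 3);
const retryDelay = Number(args.retryDelay ?? 1000);

// Apply the retry behavior to the default axios instance used by processFeed.
axiosRetry(axios, {
  retries,
  retryDelay: () => retryDelay, // fixed delay in milliseconds between attempts
  retryCondition: (error) =>
    axiosRetry.isNetworkOrIdempotentRequestError(error) ||
    error.response?.status === 429 ||
    error.response?.status >= 500
});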
Thanks for Reading
Share your thoughts in the comments below!