import { GraphQLClient, gql } from 'graphql-request';
import dotenv from 'dotenv';
dotenv.config();
// GraphQL client configured from environment variables.
const API_URL = process.env.CONSUELO_GRAPHQL_URL;
const API_KEY = process.env.CONSUELO_API_KEY;

// Fail fast with a clear message instead of letting graphql-request
// produce a confusing error mid-run when the env vars are missing.
if (!API_URL || !API_KEY) {
  throw new Error(
    'Missing required environment variable: CONSUELO_GRAPHQL_URL and/or CONSUELO_API_KEY.'
  );
}

const client = new GraphQLClient(API_URL, {
  headers: {
    authorization: `Bearer ${API_KEY}`,
  },
});
// Configuration — edit these values before running the script.
const CONFIG = {
  objectName: 'people', // API name of the object to update (used to build query/mutation names)
  batchSize: 50, // Records updated per parallel batch (comment says API max is 60 — TODO confirm)
  delayMs: 1000, // Pause between batches (ms) to avoid rate limits
  filters: {
    // Filter criteria passed as the GraphQL `filter` argument;
    // only records matching this are fetched and updated.
    industry: { eq: 'Technology' },
  },
  updates: {
    // Field values written to EVERY matched record.
    tags: ['qualified'],
    status: 'Active',
  },
};
// Query: fetch one page of record ids for the configured object, with
// cursor-based pagination info. (Fixed: backticks/interpolations were
// backslash-escaped — an extraction artifact that made this invalid JS.)
// NOTE(review): assumes the schema names the filter type
// `<objectName>FilterInput` — verify against the generated schema.
const GET_RECORDS = gql`
  query GetRecords($first: Int!, $after: String, $filter: ${CONFIG.objectName}FilterInput) {
    ${CONFIG.objectName}(first: $first, after: $after, filter: $filter) {
      edges {
        node {
          id
        }
      }
      pageInfo {
        hasNextPage
        endCursor
      }
    }
  }
`;
// Mutation: update a single record by id, returning the id and the new
// updatedAt timestamp. (Fixed: backticks were backslash-escaped — an
// extraction artifact that made this invalid JS.)
// NOTE(review): assumes the schema exposes `update<Object>` taking
// `<objectName>UpdateInput!` — verify against the generated schema.
const UPDATE_RECORD = gql`
  mutation UpdateRecord($id: ID!, $data: ${CONFIG.objectName}UpdateInput!) {
    update${capitalize(CONFIG.objectName)}(id: $id, data: $data) {
      id
      updatedAt
    }
  }
`;
// Upper-case the first character of `str`, leaving the rest unchanged.
// Returns '' unchanged for an empty string.
function capitalize(str) {
  const head = str.slice(0, 1);
  const tail = str.slice(1);
  return head.toUpperCase() + tail;
}
// Fetch the ids of every record matching CONFIG.filters, following
// cursor-based pagination until the server reports no more pages.
// Returns an array of `{ id }` nodes. (Fixed: the progress log's
// template literal was backslash-escaped — invalid JS as written.)
async function fetchRecords() {
  const records = [];
  let hasNextPage = true;
  let after = null;
  while (hasNextPage) {
    const data = await client.request(GET_RECORDS, {
      first: 60, // page size for fetching; independent of CONFIG.batchSize (update batches)
      after,
      filter: CONFIG.filters,
    });
    const { edges, pageInfo } = data[CONFIG.objectName];
    records.push(...edges.map((edge) => edge.node));
    hasNextPage = pageInfo.hasNextPage;
    after = pageInfo.endCursor;
    console.log(`Fetched ${records.length} records so far...`);
  }
  return records;
}
// Apply CONFIG.updates to every record, in parallel batches of
// CONFIG.batchSize, pausing CONFIG.delayMs between batches for rate-limit
// protection. Individual failures are logged and counted but do not abort
// the run. Returns { updated, failed } counts. (Fixed: template literals
// were backslash-escaped — invalid JS as written.)
async function updateRecords(records) {
  let updated = 0;
  let failed = 0;
  // Hoisted: total batch count is loop-invariant.
  const totalBatches = Math.ceil(records.length / CONFIG.batchSize);
  for (let i = 0; i < records.length; i += CONFIG.batchSize) {
    const batch = records.slice(i, i + CONFIG.batchSize);
    console.log(`Processing batch ${Math.floor(i / CONFIG.batchSize) + 1}/${totalBatches}...`);
    await Promise.all(
      batch.map((record) =>
        client
          .request(UPDATE_RECORD, { id: record.id, data: CONFIG.updates })
          .then(() => {
            updated++;
            process.stdout.write('.'); // progress dot per successful update
          })
          .catch((err) => {
            failed++;
            console.error(`Failed to update ${record.id}:`, err.message);
          })
      )
    );
    // Rate limit protection: sleep between batches (skipped after the last one).
    if (i + CONFIG.batchSize < records.length) {
      await new Promise((resolve) => setTimeout(resolve, CONFIG.delayMs));
    }
  }
  return { updated, failed };
}
// Main execution: fetch matching records, ask for interactive confirmation,
// then bulk-update and report counts. Exits with status 1 on unexpected
// errors. (Fixed: escaped template literals; the confirmation answer was
// not trimmed, so a trailing space after "yes" silently aborted the run;
// rl.close() was not guaranteed if the prompt threw.)
async function main() {
  console.log('Starting bulk update...');
  console.log('Configuration:', CONFIG);
  try {
    // Step 1: Fetch records
    console.log('--- Fetching Records ---');
    const records = await fetchRecords();
    console.log(`Found ${records.length} records to update.`);
    if (records.length === 0) {
      console.log('No records found. Exiting.');
      return;
    }
    // Confirm before proceeding. node:readline/promises provides a
    // promise-based question(), so no manual Promise wrapping is needed.
    const readline = await import('node:readline/promises');
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    let answer;
    try {
      answer = await rl.question(`Update ${records.length} records? (yes/no): `);
    } finally {
      rl.close(); // always release stdin, even if the prompt fails
    }
    // trim() so stray whitespace around "yes" doesn't abort a confirmed run.
    if (answer.trim().toLowerCase() !== 'yes') {
      console.log('Aborted.');
      return;
    }
    // Step 2: Update records
    console.log('--- Updating Records ---');
    const result = await updateRecords(records);
    // Report results
    console.log('--- Results ---');
    console.log(`Total records: ${records.length}`);
    console.log(`Updated: ${result.updated}`);
    console.log(`Failed: ${result.failed}`);
  } catch (error) {
    console.error('Error:', error);
    process.exit(1);
  }
}

// Errors are handled inside main's try/catch, so the returned promise
// cannot reject; invoking without .catch() is safe here.
main();