This guide covers common issues, error scenarios, and their solutions when working with the MindPeeker platform. Use this as your first resource when encountering problems during development and integration.
Symptoms:
Error: 401 Unauthorized - Invalid API key
Solutions:
First, check whether the CLI recognizes your key:
mindpeeker auth status
// Debug environment variables
console.log('API Key present:', !!process.env.MINDPEEKER_API_KEY);
console.log('API Key format:', process.env.MINDPEEKER_API_KEY?.substring(0, 10) + '...');
If the key is missing, malformed, or revoked, regenerate it:
mindpeeker auth regenerate --type production
// Verify the key has the required permissions (run inside an async function)
const { MindPeekerClient } = require('@mindpeeker/javascript');
const client = new MindPeekerClient({ apiKey: process.env.MINDPEEKER_API_KEY });
try {
await client.sessions.list({ limit: 1 });
console.log('Key has sessions:read permission');
} catch (error) {
console.log('Missing sessions:read permission');
}
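If the SDK call still fails, it can help to call the REST API directly to rule out client configuration. This is only a sketch: the base URL below is a placeholder and Bearer authentication is an assumption, so substitute the values from your API reference.
// Placeholder base URL and assumed Bearer auth - replace with the values
// documented in your MindPeeker API reference.
const BASE_URL = 'https://api.mindpeeker.example/v1';

async function checkKeyDirectly() {
  const response = await fetch(`${BASE_URL}/sessions?limit=1`, {
    headers: { Authorization: `Bearer ${process.env.MINDPEEKER_API_KEY}` }
  });
  console.log('Status:', response.status); // 401 here means the key itself is rejected
}

checkKeyDirectly().catch(console.error);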
Symptoms:
Error: 429 Too Many Requests - Rate limit exceeded
Solutions:
async function createSessionWithRetry(sessionData, maxRetries = 3) {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
return await client.sessions.create(sessionData);
} catch (error) {
if (error.status === 429 && attempt < maxRetries) {
const retryAfter = Number(error.headers?.['retry-after']) || Math.pow(2, attempt);
await new Promise(resolve => setTimeout(resolve, retryAfter * 1000));
continue;
}
throw error;
}
}
}
function checkRateLimits(response) {
const limit = Number(response.headers['x-ratelimit-limit']);
const remaining = Number(response.headers['x-ratelimit-remaining']);
const reset = Number(response.headers['x-ratelimit-reset']);
console.log(`Rate limit: ${remaining}/${limit} (resets at ${new Date(reset * 1000)})`);
if (remaining < 10) {
console.warn('Approaching rate limit - consider implementing backoff');
}
}
Check your current limits and upgrade your plan if you routinely hit them:
mindpeeker account limits
mindpeeker account upgrade --plan professional
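Beyond reacting to 429s, you can throttle requests proactively on the client. A minimal sketch, assuming a per-second budget (the 5 requests/second figure is only an example; match it to what `mindpeeker account limits` reports):
// Minimal client-side throttle: at most `maxPerSecond` calls per second.
// The default of 5/sec is an assumed example - use your plan's actual limit.
function createThrottle(maxPerSecond = 5) {
  let available = maxPerSecond;
  const waiters = [];
  setInterval(() => {
    available = maxPerSecond;
    while (available > 0 && waiters.length > 0) {
      available--;
      waiters.shift()();
    }
  }, 1000).unref();
  return async function throttled(fn) {
    if (available > 0) {
      available--;
      return fn();
    }
    await new Promise(resolve => waiters.push(resolve));
    return fn();
  };
}

// Usage: wrap API calls so bursts are spread out
// const throttle = createThrottle(5);
// const session = await throttle(() => client.sessions.create(sessionData));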
Symptoms:
Error: Session timeout after 30 minutes
Solutions:
// Set appropriate duration for your use case
const session = await client.sessions.create({
type: 'remote_viewing',
target: 'Complex investigation',
duration_minutes: 60 // Increase from default 30
});
async function monitorSession(sessionId, callback) {
const pollInterval = 30000; // 30 seconds
const poll = async () => {
try {
const status = await client.sessions.getStatus(sessionId);
if (status.status === 'completed') {
const results = await client.sessions.getResults(sessionId);
callback(null, results);
return;
}
if (status.status === 'failed') {
callback(new Error('Session failed'), null);
return;
}
// Continue polling
setTimeout(poll, pollInterval);
} catch (error) {
callback(error, null);
}
};
poll();
}
// Instead of polling, subscribe to webhooks (assumes an existing Express app)
app.post('/webhook', (req, res) => {
const event = req.body;
if (event.type === 'session.completed') {
handleSessionCompleted(event.data);
} else if (event.type === 'session.failed') {
handleSessionFailed(event.data);
}
res.status(200).send('OK');
});
Symptoms:
Same query returns different results on multiple attempts
Solutions:
// Ensure consistent session parameters
const sessionConfig = {
type: 'remote_viewing',
target: 'Specific target description',
modality: 'visual',
precision_level: 'high', // Use high precision for consistency
random_seed: 12345 // Use same seed for reproducible results
};
async function getConsensusResult(target, numSessions = 3) {
const sessions = [];
for (let i = 0; i < numSessions; i++) {
const session = await client.sessions.create({
type: 'remote_viewing',
target,
modality: 'visual'
});
const results = await client.sessions.waitForCompletion(session.sessionId);
sessions.push(results);
}
return aggregateResults(sessions);
}
function aggregateResults(sessions) {
// Find common elements across sessions
const allDescriptors = sessions.flatMap(s => s.results.descriptors);
const descriptorCounts = {};
allDescriptors.forEach(desc => {
descriptorCounts[desc] = (descriptorCounts[desc] || 0) + 1;
});
// Return descriptors that appear in multiple sessions
const consensusDescriptors = Object.entries(descriptorCounts)
.filter(([_, count]) => count >= 2)
.map(([desc, _]) => desc);
return {
consensusDescriptors,
confidence: Math.max(...sessions.map(s => s.confidenceScore))
};
}
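A usage sketch for the consensus helper above; the target string is illustrative and the call must run inside an async function:
// Run three sessions against the same target and keep only the
// descriptors that at least two of them agree on.
const consensus = await getConsensusResult('Coastal warehouse complex', 3);
console.log('Agreed descriptors:', consensus.consensusDescriptors);
console.log('Highest single-session confidence:', consensus.confidence);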
Symptoms:
API requests taking > 30 seconds to respond
Solutions:
// Use focused analysis instead of comprehensive when possible
const analysis = await client.analysis.submitTarget({
target_type: 'location',
reference_material: minimalData, // Send only essential data
analysis_type: 'focused', // Faster than comprehensive
priority: 'high' // Get faster processing
});
const cache = new Map();
async function getCachedAnalysis(analysisId) {
const cacheKey = `analysis:${analysisId}`;
if (cache.has(cacheKey)) {
const cached = cache.get(cacheKey);
if (Date.now() - cached.timestamp < 300000) { // 5 minutes
return cached.data;
}
cache.delete(cacheKey); // Evict the stale entry so the cache does not grow unbounded
}
const results = await client.analysis.getResults(analysisId);
cache.set(cacheKey, { data: results, timestamp: Date.now() });
return results;
}
const { MindPeekerClient } = require('@mindpeeker/javascript');
// Configure client for performance
const client = new MindPeekerClient({
apiKey: process.env.MINDPEEKER_API_KEY,
timeout: 60000, // Increase timeout
retryAttempts: 2,
keepAlive: true, // Enable connection reuse
maxConcurrent: 5 // Limit concurrent requests
});
Symptoms:
SyntaxError: Unexpected token in JSON at position X
Solutions:
async function safeJsonParse(response) {
const text = await response.text();
try {
return JSON.parse(text);
} catch (error) {
console.error('Invalid JSON response:', text.substring(0, 200));
throw new Error(`Invalid JSON from API: ${error.message}`);
}
}
// Usage
const response = await fetch('/api/sessions');
const data = await safeJsonParse(response);
async function makeApiRequest(url, options = {}) {
const response = await fetch(url, options);
// Check content type
const contentType = response.headers.get('content-type') || '';
if (!contentType.includes('application/json')) {
throw new Error(`Expected JSON response, got ${contentType}`);
}
// Check response status
if (!response.ok) {
const errorText = await response.text();
throw new Error(`API Error ${response.status}: ${errorText}`);
}
return response.json();
}
Symptoms:
Webhook events not being received or delayed
Solutions:
// Test webhook endpoint locally
const express = require('express');
const ngrok = require('ngrok');
async function testWebhook() {
// Start local server
const server = express();
server.post('/webhook', express.raw({ type: 'application/json' }), (req, res) => {
console.log('Webhook received:', req.body.toString());
res.status(200).send('OK');
});
server.listen(3000);
// Create tunnel
const url = await ngrok.connect(3000);
console.log('Webhook URL:', url);
// Register webhook with MindPeeker
await client.webhooks.create({
url: `${url}/webhook`,
events: ['session.completed'],
secret: 'test_secret'
});
}
const crypto = require('crypto');
function verifyWebhookSignature(payload, signature, secret) {
if (!signature) {
console.error('Missing webhook signature');
return false;
}
const expectedSignature = crypto
.createHmac('sha256', secret)
.update(payload)
.digest('hex');
const expectedBuffer = Buffer.from(`sha256=${expectedSignature}`);
const receivedBuffer = Buffer.from(signature);
// timingSafeEqual throws if the buffers differ in length, so compare lengths first
const isValid = expectedBuffer.length === receivedBuffer.length &&
crypto.timingSafeEqual(expectedBuffer, receivedBuffer);
if (!isValid) {
console.error('Invalid webhook signature');
console.log('Expected:', `sha256=${expectedSignature}`);
console.log('Received:', signature);
}
return isValid;
}
// Log webhook delivery attempts
const webhookAttempts = new Map();
function logWebhookAttempt(eventId, status, responseTime) {
webhookAttempts.set(eventId, {
status,
responseTime,
timestamp: new Date().toISOString()
});
// Clean old entries
const oneHourAgo = Date.now() - 3600000;
for (const [id, attempt] of webhookAttempts.entries()) {
if (new Date(attempt.timestamp).getTime() < oneHourAgo) {
webhookAttempts.delete(id);
}
}
console.log(`Webhook delivery stats: ${webhookAttempts.size} events in last hour`);
}
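Delayed or re-delivered events are often caused by slow handlers, since most webhook senders expect a quick 2xx response. A sketch that acknowledges immediately and defers the real work (handleEvent is a placeholder for your own processing; verify the signature before acting on the payload if you use one):
// Acknowledge the delivery right away, then process asynchronously so the
// sender does not time out and retry.
app.post('/webhook', express.json(), (req, res) => {
  res.status(200).send('OK'); // respond before doing any heavy work

  setImmediate(async () => {
    try {
      await handleEvent(req.body); // placeholder for your own processing
    } catch (error) {
      console.error('Webhook processing failed:', error);
    }
  });
});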
Symptoms:
Data not being saved correctly or database errors
Solutions:
async function saveSessionWithTransaction(sessionData) {
const connection = await db.getConnection();
try {
await connection.beginTransaction();
// Save session
const [sessionResult] = await connection.execute(
'INSERT INTO sessions (id, type, target, status) VALUES (?, ?, ?, ?)',
[sessionData.sessionId, sessionData.type, sessionData.target, sessionData.status]
);
// Save metadata
if (sessionData.metadata) {
await connection.execute(
'INSERT INTO session_metadata (session_id, `key`, `value`) VALUES (?, ?, ?)', // `key` is a reserved word in MySQL, so quote it
[sessionData.sessionId, 'metadata', JSON.stringify(sessionData.metadata)]
);
}
await connection.commit();
return sessionResult.insertId;
} catch (error) {
await connection.rollback();
throw error;
} finally {
connection.release();
}
}
function validateSessionData(data) {
const errors = [];
if (!data.sessionId || typeof data.sessionId !== 'string') {
errors.push('Invalid session ID');
}
if (!data.type || !['remote_viewing', 'dowsing', 'automatic_writing'].includes(data.type)) {
errors.push('Invalid session type');
}
if (data.confidenceScore && (data.confidenceScore < 0 || data.confidenceScore > 1)) {
errors.push('Confidence score must be between 0 and 1');
}
if (errors.length > 0) {
throw new Error(`Validation failed: ${errors.join(', ')}`);
}
return true;
}
Symptoms:
Application memory usage continuously increasing
Solutions:
function monitorMemory() {
const used = process.memoryUsage();
console.log('Memory Usage:');
for (let key in used) {
console.log(`${key}: ${Math.round(used[key] / 1024 / 1024 * 100) / 100} MB`);
}
// Check for memory leaks
if (used.heapUsed > 500 * 1024 * 1024) { // 500MB
console.warn('High memory usage detected');
}
}
// Run every 5 minutes
setInterval(monitorMemory, 300000);
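If usage keeps climbing, a heap snapshot makes it much easier to see what is being retained. Node's built-in v8 module can write one on demand; inspecting the file in Chrome DevTools is left to you:
const v8 = require('v8');

// Write a heap snapshot when memory crosses a threshold.
// Snapshots are large and briefly block the event loop, so use sparingly.
function snapshotIfNeeded(thresholdBytes = 500 * 1024 * 1024) {
  if (process.memoryUsage().heapUsed > thresholdBytes) {
    const file = v8.writeHeapSnapshot(); // returns the path of the generated .heapsnapshot file
    console.warn(`Heap snapshot written to ${file}`);
  }
}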
class SessionManager {
constructor() {
this.activeSessions = new Map();
this.cleanupInterval = setInterval(() => this.cleanup(), 60000);
}
createSession(sessionData) {
const session = {
...sessionData,
createdAt: Date.now(),
lastActivity: Date.now()
};
this.activeSessions.set(session.sessionId, session);
return session;
}
cleanup() {
const now = Date.now();
const oneHourAgo = now - 3600000;
for (const [sessionId, session] of this.activeSessions.entries()) {
if (session.lastActivity < oneHourAgo) {
this.activeSessions.delete(sessionId);
console.log(`Cleaned up stale session: ${sessionId}`);
}
}
}
destroy() {
clearInterval(this.cleanupInterval);
this.activeSessions.clear();
}
}
Symptoms:
CPU usage consistently above 80%
Solutions:
// Use worker threads for CPU-intensive tasks
const { Worker, isMainThread, parentPort, workerData } = require('worker_threads');
if (isMainThread) {
async function processLargeDataset(data) {
return new Promise((resolve, reject) => {
const worker = new Worker(__filename, {
workerData: data
});
worker.on('message', resolve);
worker.on('error', reject);
worker.on('exit', (code) => {
if (code !== 0) reject(new Error(`Worker stopped with exit code ${code}`));
});
});
}
} else {
// Process data in the worker thread (processData is a placeholder for your CPU-heavy function)
const result = processData(workerData);
parentPort.postMessage(result);
}
const Queue = require('bull');
const sessionQueue = new Queue('session processing', {
redis: { port: 6379, host: 'localhost' }
});
// Process jobs in the background (the processor name must match the name passed to queue.add)
sessionQueue.process('process', async (job) => {
const { sessionId, sessionData } = job.data;
try {
const results = await client.sessions.getResults(sessionId);
await saveResultsToDatabase(sessionId, results);
return { success: true, sessionId };
} catch (error) {
console.error(`Failed to process session ${sessionId}:`, error);
throw error;
}
});
// Add job to queue
async function queueSessionProcessing(sessionId, sessionData) {
await sessionQueue.add('process', {
sessionId,
sessionData
}, {
attempts: 3,
backoff: { type: 'exponential', delay: 2000 } // retry failed jobs with exponential backoff
});
}
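It also helps to watch the queue itself so failing or stalled jobs are visible; Bull emits events you can log:
// Log queue-level events so stuck or failing jobs do not go unnoticed
sessionQueue.on('completed', (job) => {
  console.log(`Job ${job.id} completed for session ${job.data.sessionId}`);
});
sessionQueue.on('failed', (job, error) => {
  console.error(`Job ${job.id} failed (attempt ${job.attemptsMade}):`, error.message);
});
sessionQueue.on('stalled', (job) => {
  console.warn(`Job ${job.id} stalled and will be reprocessed`);
});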
// Enable detailed logging
const debug = require('debug')('mindpeeker:integration');
// Add debug statements
debug('Creating session with data:', sessionData);
debug('API response:', response.data);
debug('Error occurred:', error);
// Set debug environment variable
// DEBUG=mindpeeker:* node app.js
class APITracer {
constructor() {
this.requests = [];
}
traceRequest(method, url, options) {
const requestId = this.generateRequestId();
const startTime = Date.now();
console.log(`[${requestId}] ${method} ${url}`);
return {
requestId,
startTime,
log: (message) => console.log(`[${requestId}] ${message}`)
};
}
traceResponse(trace, response) {
const duration = Date.now() - trace.startTime;
console.log(`[${trace.requestId}] Response ${response.status} (${duration}ms)`);
this.requests.push({
requestId: trace.requestId,
duration,
status: response.status,
timestamp: new Date().toISOString()
});
}
generateRequestId() {
return Math.random().toString(36).slice(2, 11);
}
getStats() {
const total = this.requests.length;
if (total === 0) return { total: 0, avgDuration: 0, successRate: 0 };
const avgDuration = this.requests.reduce((sum, r) => sum + r.duration, 0) / total;
const successRate = this.requests.filter(r => r.status < 400).length / total * 100;
return { total, avgDuration, successRate };
}
}
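A usage sketch wiring the tracer around fetch (the URL in the comment is a placeholder):
const tracer = new APITracer();

async function tracedFetch(url, options = {}) {
  const trace = tracer.traceRequest(options.method || 'GET', url, options);
  const response = await fetch(url, options);
  tracer.traceResponse(trace, response);
  return response;
}

// Example (inside an async function):
// await tracedFetch('https://example.com/api/sessions');
// console.log(tracer.getStats());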
When reporting issues, include your environment details:
node --version
npm --version
uname -a
mindpeeker --version
mindpeeker config list
// Capture full error context
const errorContext = {
message: error.message,
stack: error.stack,
code: error.code,
status: error.status,
requestId: error.requestId,
timestamp: new Date().toISOString(),
userAgent: typeof navigator !== 'undefined' ? navigator.userAgent : `node/${process.version}`,
sessionId: currentSessionId
};
console.error('Error context:', JSON.stringify(errorContext, null, 2));
Run the built-in diagnostics and include their output:
mindpeeker doctor
mindpeeker test connection
mindpeeker account limits
mindpeeker config validate
// Implement backup and recovery
const fs = require('fs/promises');
async function backupSessionData() {
const sessions = await client.sessions.list({ limit: 1000 });
const backup = {
timestamp: new Date().toISOString(),
sessions: sessions
};
await fs.mkdir('backup', { recursive: true }); // Ensure the backup directory exists
await fs.writeFile(
`backup/sessions-${Date.now()}.json`,
JSON.stringify(backup, null, 2)
);
}
async function restoreSessionData(backupFile) {
const backup = JSON.parse(await fs.readFile(backupFile, 'utf8'));
for (const session of backup.sessions) {
try {
await saveSessionToDatabase(session);
} catch (error) {
console.error(`Failed to restore session ${session.sessionId}:`, error);
}
}
}