MuslimAI
Ask Islam
Salaam 👋 — Ask anything about the Qur’an, Islam, and Muslim life
Educational guidance with references (Qur’an & well‑known hadith collections) where possible. This is not a fatwa — for religious rulings, consult a qualified scholar in your madhhab or your local imam.
By using MuslimAI, you agree that responses are for learning, may be imperfect, and do not replace scholarly advice. If your question is sensitive (marriage, finance, health), please speak to a trusted scholar or counselor.
// Minimal Node server to proxy OpenAI or Gemini with streaming.
// Save as: api/ask.js (Vercel/Netlify) or server.js (Express). Node 18+.
// Env vars required: OPENAI_API_KEY, GEMINI_API_KEY (optional).

const PROVIDERS = {
  openai: {
    /**
     * Stream a chat completion from OpenAI and relay its SSE frames verbatim.
     * @param {{model: string, messages: Array<{role: string, content: string}>,
     *          temperature: number, system: string}} opts
     * @param {object} res - HTTP response the SSE stream is written into
     */
    async stream({ model, messages, temperature, system }, res) {
      const url = 'https://api.openai.com/v1/chat/completions';
      const payload = {
        model,
        messages: buildOpenAiMessages(system, messages),
        temperature,
        stream: true,
      };
      const r = await fetch(url, {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${process.env.OPENAI_API_KEY}`,
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(payload),
      });
      // Set SSE headers before ANY write, so error frames are also valid SSE.
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      if (!r.ok) {
        const t = await r.text();
        res.write(`event: error\n`);
        res.write(`data: ${JSON.stringify({ message: t || r.statusText })}\n\n`);
        res.end();
        return;
      }
      // r.body yields Uint8Array chunks; Uint8Array#toString() would produce
      // comma-separated byte numbers, so decode with a streaming TextDecoder
      // (also keeps multi-byte UTF-8 sequences split across chunks intact).
      const decoder = new TextDecoder();
      for await (const chunk of r.body) {
        // OpenAI already sends SSE lines starting with "data:" — relay as-is.
        res.write(decoder.decode(chunk, { stream: true }));
      }
      res.end();
    },
  },
  gemini: {
    /**
     * Stream a Gemini response, re-framed as OpenAI-style SSE so the client
     * can consume both providers with one parser.
     * @param {{model: string, messages: Array<{role: string, content: string}>,
     *          temperature: number, system: string}} opts
     * @param {object} res - HTTP response the SSE stream is written into
     */
    async stream({ model, messages, temperature, system }, res) {
      // Flatten to a single prompt for simplicity; enhance to multi-turn later.
      const prompt = openAiToPlain(system, messages);
      // alt=sse makes streamGenerateContent emit one complete JSON object per
      // "data:" line (without it the endpoint streams a JSON array, which the
      // line parser below could never match). The API key goes in the
      // x-goog-api-key header so it never lands in URLs or access logs.
      const url = `https://generativelanguage.googleapis.com/v1beta/models/${encodeURIComponent(model)}:streamGenerateContent?alt=sse`;
      const payload = {
        contents: [{ role: 'user', parts: [{ text: prompt }] }],
        generationConfig: { temperature },
      };
      const r = await fetch(url, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'x-goog-api-key': process.env.GEMINI_API_KEY,
        },
        body: JSON.stringify(payload),
      });
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      if (!r.ok) {
        const t = await r.text();
        res.write(`event: error\n`);
        res.write(`data: ${JSON.stringify({ message: t || r.statusText })}\n\n`);
        res.end();
        return;
      }
      // A network chunk may hold several SSE lines or a partial one, so buffer
      // and split on newlines instead of parsing raw chunks.
      const decoder = new TextDecoder();
      let buffer = '';
      for await (const chunk of r.body) {
        buffer += decoder.decode(chunk, { stream: true });
        const lines = buffer.split('\n');
        buffer = lines.pop(); // keep trailing partial line for the next chunk
        for (const line of lines) {
          const s = line.trim();
          if (!s) continue;
          // Convert to OpenAI-like "data: {choices:[{delta:{content}}]}".
          try {
            const { candidates } = JSON.parse(s.replace(/^data:\s*/, ''));
            const text = candidates?.[0]?.content?.parts?.[0]?.text || '';
            if (text) {
              const frame = { choices: [{ delta: { content: text } }] };
              res.write(`data: ${JSON.stringify(frame)}\n\n`);
            }
          } catch {
            /* ignore non-JSON keep-alive/partial lines */
          }
        }
      }
      res.write('data: [DONE]\n\n');
      res.end();
    },
  },
};

/**
 * Prepend an optional system message to the chat history, OpenAI format.
 * @param {string} system - system instruction; '' or falsy omits it
 * @param {Array<{role: string, content: string}>} messages
 * @returns {Array<{role: string, content: string}>}
 */
function buildOpenAiMessages(system, messages) {
  const arr = [];
  if (system) {
    arr.push({ role: 'system', content: system });
  }
  for (const m of messages) {
    arr.push({ role: m.role, content: m.content });
  }
  return arr;
}

/**
 * Flatten an OpenAI-style message list into one plain-text prompt
 * (used for the single-turn Gemini request).
 * @param {string} system - optional system instruction
 * @param {Array<{role: string, content: string}>} messages
 * @returns {string} role-labelled transcript joined by blank lines
 */
function openAiToPlain(system, messages) {
  const parts = [];
  if (system) {
    parts.push(`[System Instruction]\n${system}\n`);
  }
  for (const m of messages) {
    parts.push(`${m.role.toUpperCase()}: ${m.content}`);
  }
  return parts.join('\n\n');
}

// ---------- Vercel style export ----------
/**
 * HTTP entry point. POST body: { provider, model, messages, temperature, system }.
 * Streams the provider's answer back as SSE; 405 on non-POST, 500 on setup errors.
 */
export default async function handler(req, res) {
  if (req.method !== 'POST') {
    res.setHeader('Allow', 'POST');
    res.status(405).json({ error: 'Method not allowed' });
    return;
  }
  try {
    const {
      provider = 'openai',
      model = 'gpt-4.1-mini',
      messages = [],
      temperature = 0.2,
      system = '',
    } = req.body || {};
    if (provider === 'openai' && !process.env.OPENAI_API_KEY) {
      throw new Error('Missing OPENAI_API_KEY');
    }
    if (provider === 'gemini' && !process.env.GEMINI_API_KEY) {
      throw new Error('Missing GEMINI_API_KEY');
    }
    const impl = PROVIDERS[provider];
    if (!impl) {
      throw new Error('Unsupported provider');
    }
    await impl.stream({ model, messages, temperature, system }, res);
  } catch (err) {
    // NOTE(review): if streaming already started, headers are committed and
    // this JSON reply cannot be delivered — acceptable for this minimal proxy.
    res.setHeader('Content-Type', 'application/json');
    res.status(500).end(JSON.stringify({ error: err.message || String(err) }));
  }
}

// ---------- Express fallback (if not using Vercel) ----------
// Uncomment to run as standalone server:
// import express from 'express';
// import cors from 'cors';
// const app = express();
// app.use(cors());
// app.use(express.json());
// app.post('/api/ask', handler);
// app.listen(8787, () => console.log('Server on http://localhost:8787'));