require('dotenv/config');
const { AzureOpenAI } = require('openai');
const fs = require('fs');
const path = require('path');

// Get environment variables
const azureOpenAIKey = process.env.AZURE_OPENAI_KEY;
const azureOpenAIEndpoint = process.env.AZURE_OPENAI_ENDPOINT;
const azureOpenAIVersion = "2024-05-01-preview";

// Fail fast if credentials are missing — every call below needs them.
if (!azureOpenAIKey || !azureOpenAIEndpoint) {
  throw new Error("Please set AZURE_OPENAI_KEY and AZURE_OPENAI_ENDPOINT in your environment variables.");
}

/**
 * Build an AzureOpenAI SDK client from the environment configuration.
 * @returns {AzureOpenAI} configured client
 */
const getClient = () => {
  return new AzureOpenAI({
    endpoint: azureOpenAIEndpoint,
    apiVersion: azureOpenAIVersion,
    apiKey: azureOpenAIKey,
  });
};

const assistantsClient = getClient();

// Existing vector store ID (pre-created in the Azure resource).
const vectorStoreId = "vs_Ja4pRNEkO6Pl8T2Xs7JU1OtV";

/**
 * Upload local files into the existing vector store and wait until
 * ingestion finishes.
 *
 * Fix: the Node SDK exposes camelCase accessors —
 * `beta.vectorStores.fileBatches.uploadAndPoll` — not the Python-style
 * `vector_stores.file_batches.upload_and_poll`, which is undefined in JS
 * and crashed before any request was sent.
 *
 * @param {string[]} filePaths - paths of files to upload
 * @returns {Promise<void>}
 */
const uploadFilesToExistingVectorStore = async (filePaths) => {
  try {
    const fileStreams = filePaths.map((filePath) => fs.createReadStream(filePath));

    const fileBatch = await assistantsClient.beta.vectorStores.fileBatches.uploadAndPoll(
      vectorStoreId,
      { files: fileStreams }
    );

    console.log(`Files uploaded to vector store with status: ${fileBatch.status}`);
  } catch (error) {
    // Log the full error (message + stack), not just error.message.
    console.error("Error uploading files:", error);
  }
};

/**
 * Create a file-search assistant, run the user's query against the
 * vector store on a fresh thread, and print the resulting messages.
 *
 * NOTE(review): this creates a brand-new assistant on every call;
 * consider reusing one assistant ID if this runs often.
 *
 * @param {string} searchQuery - natural-language query for the documents
 * @returns {Promise<void>}
 */
const runAssistant = async (searchQuery) => {
  try {
    // Setup the assistant with the file_search tool bound to our store.
    const assistantOptions = {
      model: "gpt-4o-mini",
      name: "PDF Search",
      instructions: "You are an expert at searching PDF documents.",
      tools: [{
        type: "file_search",
        file_search: {
          ranking_options: { ranker: "default_2024_08_21", score_threshold: 0 },
        },
      }],
      tool_resources: { file_search: { vector_store_ids: [vectorStoreId] } },
      temperature: 1,
      top_p: 1,
    };
    const assistant = await assistantsClient.beta.assistants.create(assistantOptions);

    // Create a thread that can also search the vector store.
    const assistantThread = await assistantsClient.beta.threads.create({
      tool_resources: { file_search: { vector_store_ids: [vectorStoreId] } },
    });
    console.log(`Thread created: ${JSON.stringify(assistantThread)}`);

    // Add the user's search query to the thread.
    const threadResponse = await assistantsClient.beta.threads.messages.create(
      assistantThread.id,
      { role: "user", content: searchQuery }
    );
    console.log(`Message created: ${JSON.stringify(threadResponse)}`);

    // Run the assistant on the thread.
    const runResponse = await assistantsClient.beta.threads.runs.create(
      assistantThread.id,
      { assistant_id: assistant.id }
    );
    console.log(`Run started: ${JSON.stringify(runResponse)}`);

    // Poll until the run leaves queued/in_progress, with an upper bound
    // so a stuck run cannot loop forever (was unbounded before).
    const maxPollAttempts = 300; // ~5 minutes at 1s intervals
    let runStatus = runResponse.status;
    let attempts = 0;
    while ((runStatus === 'queued' || runStatus === 'in_progress') && attempts < maxPollAttempts) {
      await new Promise((resolve) => setTimeout(resolve, 1000));
      const runStatusResponse = await assistantsClient.beta.threads.runs.retrieve(
        assistantThread.id,
        runResponse.id
      );
      runStatus = runStatusResponse.status;
      attempts += 1;
      console.log(`Current run status: ${runStatus}`);
    }

    // Retrieve messages only if the run completed successfully.
    if (runStatus === 'completed') {
      const messagesResponse = await assistantsClient.beta.threads.messages.list(
        assistantThread.id
      );
      console.log(`Messages in the thread: ${JSON.stringify(messagesResponse)}`);
    } else {
      console.log(`Run status is ${runStatus}, unable to fetch messages.`);
    }
  } catch (error) {
    // Log the full error (message + stack), not just error.message.
    console.error("Error running the assistant:", error);
  }
};

/**
 * Example usage: upload documents, then search them.
 * @returns {Promise<void>}
 */
const main = async () => {
  const filePaths = ["D:/Ebitaus/TestCodes/uploads/CERTIFICATE OF INCORPORATION.pdf"];
  const searchQuery = "Can you give pan";

  // Upload files to the existing vector store.
  await uploadFilesToExistingVectorStore(filePaths);

  // Run the assistant with the search query.
  await runAssistant(searchQuery);
};

main();