Namespaces not created even though my program runs successfully?

Below is my code, which runs successfully. The file uploads fine and the terminal shows it, but the namespace is not showing on the Pinecone console website. Please tell me if I made a mistake.

import { db } from '@/db'
import { getKindeServerSession } from '@kinde-oss/kinde-auth-nextjs/server'
import {
  createUploadthing,
  type FileRouter,
} from 'uploadthing/next'

import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { getPineconeClient } from '@/lib/pinecone'
import { getUserSubscriptionPlan } from '@/lib/stripe'
import { PLANS } from '@/config/stripe'

const f = createUploadthing()

const middleware = async () => {
  const { getUser } = getKindeServerSession()
  const user = getUser()

  if (!user || !user.id) throw new Error('Unauthorized')

  const subscriptionPlan = await getUserSubscriptionPlan()

  return { subscriptionPlan, userId: user.id }
}

const onUploadComplete = async ({
  metadata,
  file,
}: {
  metadata: Awaited<ReturnType<typeof middleware>>
  file: {
    key: string
    name: string
    url: string
  }
}) => {
  const isFileExist = await db.file.findFirst({
    where: {
      key: file.key,
    },
  })

  if (isFileExist) return

  const createdFile = await db.file.create({
    data: {
      key: file.key,
      name: file.name,
      userId: metadata.userId,
      url: `https://utfs.io/f/${file.key}`,
      uploadStatus: 'PROCESSING',
    },
  })

  try {
    const response = await fetch(
      `https://utfs.io/f/${file.key}`
    )

    const blob = await response.blob()

    const loader = new PDFLoader(blob)

    const pageLevelDocs = await loader.load()

    const pagesAmt = pageLevelDocs.length

    const { subscriptionPlan } = metadata
    const { isSubscribed } = subscriptionPlan

    const isProExceeded =
      pagesAmt >
      PLANS.find((plan) => plan.name === 'Pro')!.pagesPerPdf
    const isFreeExceeded =
      pagesAmt >
      PLANS.find((plan) => plan.name === 'Free')!.pagesPerPdf

    if (
      (isSubscribed && isProExceeded) ||
      (!isSubscribed && isFreeExceeded)
    ) {
      await db.file.update({
        data: {
          uploadStatus: 'SUCCESS',
        },
        where: {
          id: createdFile.id,
        },
      })
    }

    // vectorize and index the entire document
    const pinecone = await getPineconeClient()
    const pineconeIndex = pinecone.Index('doctalker')

    console.log('the pinecone is =', pinecone)
    console.log('the pineconeIndex is =', pineconeIndex)

    const embeddings = new OpenAIEmbeddings({
      openAIApiKey: process.env.OPENAI_API_KEY,
    })

    await PineconeStore.fromDocuments(
      pageLevelDocs,
      embeddings,
      {
        pineconeIndex,
        namespace: createdFile.id,
      }
    )

    await db.file.update({
      data: {
        uploadStatus: 'SUCCESS',
      },
      where: {
        id: createdFile.id,
      },
    })

  } catch (err) {
    await db.file.update({
      data: {
        uploadStatus: 'FAILED',
      },
      where: {
        id: createdFile.id,
      },
    })
  }
}

export const ourFileRouter = {
  freePlanUploader: f({ pdf: { maxFileSize: '4MB' } })
    .middleware(middleware)
    .onUploadComplete(onUploadComplete),
  proPlanUploader: f({ pdf: { maxFileSize: '16MB' } })
    .middleware(middleware)
    .onUploadComplete(onUploadComplete),
} satisfies FileRouter

export type OurFileRouter = typeof ourFileRouter
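One detail worth knowing: Pinecone creates namespaces implicitly, and a namespace only becomes visible in the console after at least one vector has actually been upserted into it. So if PineconeStore.fromDocuments never writes anything (or writes to a different index), no namespace will appear. A minimal sketch to check from code what the index actually contains, assuming the index name 'doctalker' and the standalone @pinecone-database/pinecone client (checkNamespaces is a hypothetical helper, not part of the code above):

import { Pinecone } from '@pinecone-database/pinecone'

const checkNamespaces = async () => {
  const pc = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! })
  const index = pc.Index('doctalker')

  // describeIndexStats() reports per-namespace record counts; if the
  // file id is missing from `namespaces`, the upsert never wrote vectors.
  const stats = await index.describeIndexStats()
  console.log('namespaces:', stats.namespaces)
}

checkNamespaces().catch(console.error)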

This is my matches.ts file.

Hi,
I am getting this error after updating to the new Pinecone dependency (import { Pinecone } from '@pinecone-database/pinecone'):

TypeError: Cannot read properties of null (reading 'Index')
    at getMatchesFromEmbeddings (webpack-internal:///(api)/./src/pages/api/matches.ts:17:28)


import { Pinecone, ScoredPineconeRecord } from '@pinecone-database/pinecone';

export type Metadata = {
  url: string,
  text: string,
  chunk: string,
}
console.log("metaadataaaa")

let pinecone: Pinecone | null = null

const getMatchesFromEmbeddings = async (embeddings: number[], topK: number, namespace: string): Promise<ScoredPineconeRecord<Metadata>[]> => {
  if (!process.env.PINECONE_INDEX_NAME) {
    console.log("process.env.PINECONE_INDEX_NAME", process.env.PINECONE_INDEX_NAME)
    throw new Error("PINECONE_INDEX_NAME is not set")
  }

  console.log("pinecone=>>>>>>>>>>>>>>>>>>>>>>>>>>>>", pinecone)
  console.log("process.env.PINECONE_INDEX_NAME", process.env.PINECONE_INDEX_NAME)
  const index = pinecone!.Index(process.env.PINECONE_INDEX_NAME)

  console.log("indexxxxx==>>>>>", index)
  const queryRequest = {
    vector: embeddings,
    topK,
    includeMetadata: true,
    namespace,
  }
  console.log(queryRequest, "queryRequest=>>>>>>>>>>>>>>>>>>>>")
  try {
    const queryResult = await index.query(queryRequest)

    return queryResult.matches?.map(match => ({
      ...match,
      metadata: match.metadata as Metadata
    })) || []

  } catch (e) {
    console.log("Error querying embeddings: ", e)
    throw new Error(`Error querying embeddings: ${e}`)
  }
}

export { getMatchesFromEmbeddings }
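The "Cannot read properties of null (reading 'Index')" error fits the code above: pinecone is declared as null and never assigned before pinecone!.Index(...) runs, so the non-null assertion only hides the problem from TypeScript. Below is a minimal sketch of lazily creating the client inside matches.ts, assuming PINECONE_API_KEY and PINECONE_INDEX_NAME are set; note that with the v1+ SDK the namespace is scoped via index.namespace() rather than passed inside the query object (getPinecone is a hypothetical helper):

import { Pinecone, ScoredPineconeRecord } from '@pinecone-database/pinecone';

let pinecone: Pinecone | null = null;

// Create the client on first use; the v1+ SDK needs no init()/environment step.
const getPinecone = (): Pinecone => {
  if (!pinecone) {
    pinecone = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! });
  }
  return pinecone;
};

const getMatchesFromEmbeddings = async (
  embeddings: number[],
  topK: number,
  namespace: string
): Promise<ScoredPineconeRecord[]> => {
  const index = getPinecone().Index(process.env.PINECONE_INDEX_NAME!);

  // Scope the call to the namespace, then query inside it.
  const queryResult = await index.namespace(namespace).query({
    vector: embeddings,
    topK,
    includeMetadata: true,
  });

  return queryResult.matches ?? [];
};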

This is my chat.ts file.


console.log("hiiii")

import { Pinecone } from '@pinecone-database/pinecone';

import { CallbackManager } from "@langchain/core/callbacks/manager";
import { LLMChain } from "langchain/chains";
import { OpenAIEmbeddings, ChatOpenAI, OpenAI } from '@langchain/openai';
import { PromptTemplate } from "@langchain/core/prompts";
import type { NextApiRequest, NextApiResponse } from 'next';
import { uuid } from 'uuidv4';
import { summarizeLongDocument } from './summarizer';
import { ConversationLog } from './conversationLog';
import { Metadata, getMatchesFromEmbeddings } from './matches';
import { templates } from './templates';

console.log("in the code>>>>>>")
const llm = new OpenAI({});
let pinecone: Pinecone | null = null

const pc = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! });
console.log("PineconeClient instance:", pc);

const initPineconeClient = async () => {
  console.log("Initializing PineconeClient...");
  const pc = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! });
  console.log("PineconeClient initialized successfully!", pc);
  return pc; // Optionally return the Pinecone client for further usage
}

import { NextApiResponseServerIO } from "../../types/next";
import { Namespace } from 'socket.io';

const handleRequest = async ({ prompt, userId, socketIO }: { prompt: string, userId: string, socketIO: any }) => {
  if (!pinecone) {
    await initPineconeClient();
  }
  console.log("handleRequest", handleRequest)

  let summarizedCount = 0;

  try {
    // const channel = ably.channels.get(userId);
    const interactionId = uuid()

    // Retrieve the conversation log and save the user's prompt
    const conversationLog = new ConversationLog(userId)
    const conversationHistory = await conversationLog.getConversation({ limit: 50 })
    await conversationLog.addEntry({ entry: prompt, speaker: "user" })

    // Build an LLM chain that will improve the user prompt
    const inquiryChain = new LLMChain({
      llm, prompt: new PromptTemplate({
        template: templates.inquiryTemplate,
        inputVariables: ["userPrompt", "conversationHistory"],
      })
    });
    const inquiryChainResult = await inquiryChain.call({ userPrompt: prompt, conversationHistory })
    const inquiry = inquiryChainResult.text

    // Embed the user's intent and query the Pinecone index
    const embedder = new OpenAIEmbeddings();

    const embeddings = await embedder.embedQuery(inquiry);

    socketIO.emit(userId, {
      data: {
        event: "status",
        message: "Finding matches...",
      }
    })

    console.log(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>embedding")

    const matches = await getMatchesFromEmbeddings(embeddings, 3, "langchain");
    console.log(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>below")

    const urls = matches && Array.from(new Set(matches.map(match => {
      // const metadata = match.metadata as Metadata
      // console.log("metadata ==> ", metadata);
      // const { id } = metadata
      return match.id
    })))

    const docs = matches && Array.from(
      matches.reduce((map, match) => {
        const metadata = match.metadata as Metadata;
        const { text } = metadata;
        if (!map.has(match.id)) {
          map.set(match.id, text);
        }
        return map;
      }, new Map())
    ).map(([_, text]) => text);


    const promptTemplate = new PromptTemplate({
      template: templates.qaTemplate4,
      inputVariables: ["summaries", "question", "conversationHistory"],
    });

    const chat = new ChatOpenAI({
      streaming: true,
      verbose: true,
      modelName: "gpt-3.5-turbo-0301",
      callbackManager: CallbackManager.fromHandlers({
        async handleLLMNewToken(token) {
          socketIO.emit(userId, {
            data: {
              event: "response",
              token: token,
              interactionId
            }
          })
        },
        async handleLLMEnd(result) {
          socketIO.emit(userId, {
            data: {
              event: "responseEnd",
              token: "END",
              interactionId
            }
          });
        }
      }),
    });

    const chain = new LLMChain({
      prompt: promptTemplate,
      llm: chat,
    });

    const allDocs = docs.join("\n")
    if (allDocs.length > 4000) {
      socketIO.emit(userId, {
        data: {
          event: "status",
          message: `Just a second, forming final answer...`,
        }
      })
    }

    // const summary = allDocs.length > 4000 ? await summarizeLongDocument({ document: allDocs, inquiry }) : allDocs
    const summary = allDocs;

    await chain.call({
      summaries: summary,
      question: prompt,
      conversationHistory,
      urls: urls
    });

  } catch (error) {
    // @ts-ignore
    console.error(error)
  }
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponseServerIO
) {
  const { body } = req;
  const { prompt, userId } = body;
  const socketIO = res.socket?.server?.io;
  await handleRequest({ prompt, userId, socketIO })
  res.status(200).json({ message: "started" })
}
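Two things stand out in chat.ts. First, initPineconeClient() creates a local pc but never assigns the module-level pinecone, so the if (!pinecone) guard in handleRequest re-runs on every request, and matches.ts still sees its own pinecone as null (each module has its own variable, so matches.ts must initialize its own client, as in the sketch above). A sketch of an init that actually sets the shared client, under the same env-var assumptions:

import { Pinecone } from '@pinecone-database/pinecone';

let pinecone: Pinecone | null = null;

// Assign the module-level variable so the `if (!pinecone)` guard in
// handleRequest actually takes effect after the first call.
const initPineconeClient = async () => {
  if (!pinecone) {
    pinecone = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! });
  }
  return pinecone;
};

Second, the upload route indexes documents under namespace: createdFile.id, while this route queries the hardcoded namespace "langchain". The two have to match, both for the query to find anything and for the namespace you expect to show up in the Pinecone console.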
