Skip to content

Instantly share code, notes, and snippets.

@svpino
Created August 30, 2023 14:49
Show Gist options
  • Save svpino/a5e80ddcdfa00eba4d8951186689de9a to your computer and use it in GitHub Desktop.
Save svpino/a5e80ddcdfa00eba4d8951186689de9a to your computer and use it in GitHub Desktop.
Using LlamaIndex to ask questions about your data
# !pip install llama-index monsterapi sentence_transformers --quiet
import os
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
from llama_index.embeddings import LangchainEmbedding
from langchain.embeddings import HuggingFaceEmbeddings
from sentence_transformers import SentenceTransformer
# NOTE(review): avoid committing real secrets; prefer exporting MONSTER_API_KEY
# in the shell. setdefault() keeps a key that is already present in the
# environment instead of clobbering it with the placeholder.
os.environ.setdefault("MONSTER_API_KEY", "INSERT YOUR API KEY HERE")


def main(question: str = "Here is a question about your data",
         data_dir: str = "./data") -> None:
    """Index the documents under *data_dir* and answer *question*.

    Builds an in-memory vector index over every file in *data_dir* and runs a
    single query against it using a MonsterAPI-hosted Llama 2 chat model.

    Args:
        question: Natural-language question to ask about the documents.
        data_dir: Directory whose files are loaded as source documents.

    Side effects:
        Calls the MonsterAPI service (requires MONSTER_API_KEY) and prints the
        response to stdout.
    """
    model = "llama2-7b-chat"
    # Load every file in the directory into Document objects.
    documents = SimpleDirectoryReader(data_dir).load_data()
    llm = MonsterLLM(model=model, temperature=0.75, context_window=1024)
    # Embeddings come from a HuggingFace sentence-transformer model
    # (presumably computed locally — no MonsterAPI call for embeddings).
    embed_model = LangchainEmbedding(HuggingFaceEmbeddings())
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model=embed_model
    )
    index = VectorStoreIndex.from_documents(documents, service_context=service_context)
    query_engine = index.as_query_engine()
    response = query_engine.query(question)
    print(response)


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment