Skip to content

Commit 6a83c23

Browse files
committed
Working Ollama implementation via Docker Compose
1 parent 72859d5 commit 6a83c23

13 files changed

+45
-147
lines changed

.streamlit/config.toml

Lines changed: 0 additions & 6 deletions
This file was deleted.

.streamlit/credentials.toml

Lines changed: 0 additions & 2 deletions
This file was deleted.

docker-compose.yml

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
# Compose definition that runs a local Ollama server for this project.
version: '3.8'

services:
  ollama:
    image: ollama/ollama
    container_name: ollama
    ports:
      - "11434:11434"               # default Ollama HTTP API port
    volumes:
      - ollama_volume:/root/.ollama # persist pulled models across restarts
    restart: unless-stopped

volumes:
  ollama_volume:

examples/graph_evaluation_example.py

Lines changed: 0 additions & 43 deletions
This file was deleted.

examples/ollama-example.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
"""
Minimal example: send a single prompt to a locally running Ollama server
through the LangChain community wrapper and print the reply.
"""
from langchain_community.llms import Ollama


def main() -> None:
    """Query the llama2 model once and print its answer."""
    # Assumes an Ollama server is reachable on its default port (11434),
    # e.g. the one started by this repository's docker-compose file.
    llm = Ollama(model="llama2")

    answer = llm.invoke("Tell me a joke")

    print(answer)


# Guard the entry point so importing this module has no side effects
# (the original ran the network call and print at import time).
if __name__ == "__main__":
    main()

pyproject.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ langchain_google_genai = "0.0.11"
3030
html2text = "2020.1.16"
3131
faiss-cpu = "1.7.4"
3232
beautifulsoup4 = "4.12.3"
33-
trulens_eval = "0.23.0"
3433
pandas = "2.0.3"
3534
python-dotenv = "1.0.1"
3635
tiktoken = {version = ">=0.5.2,<0.6.0"}

requirements.txt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,4 +10,3 @@ pandas==2.0.3
1010
python-dotenv==1.0.1
1111
tiktoken>=0.5.2,<0.6.0
1212
tqdm==4.66.1
13-
trulens_eval==0.23.0

scrapegraphai/evaluators/__init__.py

Lines changed: 0 additions & 4 deletions
This file was deleted.

scrapegraphai/evaluators/trulens_evaluator.py

Lines changed: 0 additions & 88 deletions
This file was deleted.

scrapegraphai/graphs/search_graph.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
"""
22
Module for performing the search on the internet
33
"""
4-
from ..models import OpenAI, Gemini
4+
from ..models import OpenAI, Gemini, Ollama
55
from .base_graph import BaseGraph
66
from ..nodes import (
77
SearchInternetNode,
@@ -33,6 +33,8 @@ def _create_llm(self, llm_config: dict):
3333
return OpenAI(llm_params)
3434
elif "gemini" in llm_params["model"]:
3535
return Gemini(llm_params)
36+
elif "llama2" in llm_params["model"]:
37+
return Ollama(llm_params)
3638
else:
3739
raise ValueError("Model not supported")
3840

scrapegraphai/models/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,4 @@
66
from .openai_itt import OpenAIImageToText
77
from .openai_tts import OpenAITextToSpeech
88
from .gemini import Gemini
9+
from .ollama import Ollama

scrapegraphai/models/ollama.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
"""
Ollama configuration wrapper
"""
from langchain_community.chat_models import ChatOllama


class Ollama(ChatOllama):
    """Thin wrapper around ``ChatOllama``.

    Lets the rest of the codebase construct the chat model from a plain
    configuration dict, and gives a single place to add Ollama-specific
    helpers later without touching call sites.
    """

    def __init__(self, llm_config: dict):
        """
        Initialize the underlying ChatOllama with the given configuration.

        Args:
            llm_config (dict): Configuration parameters for the language
                model, forwarded verbatim as keyword arguments to
                ``ChatOllama`` (e.g. ``model``).
        """
        # Expand the config dict into keyword arguments for the superclass.
        super().__init__(**llm_config)

scrapegraphai/nodes/search_internet_node.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,8 @@ def execute(self, state):
8282

8383
search_template = """Given the following user prompt, return a query that can be
8484
used to search the internet for relevant information. \n
85-
You should return only the query string. \n
85+
You should return only the query string without any additional sentences. \n
86+
You are taught to reply directly giving the search query. \n
8687
User Prompt: {user_prompt}"""
8788

8889
search_prompt = PromptTemplate(

0 commit comments

Comments
 (0)