This repository was archived by the owner on Mar 7, 2025. It is now read-only.

Commit 561301f

tests: update model

Signed-off-by: Tomas Dvorak <[email protected]>
1 parent 853905f

File tree: 18 files changed, +42 −38 lines. The commit updates the example model id from "meta-llama/llama-3-70b-instruct" to "meta-llama/llama-3-1-70b-instruct" in the example scripts and SDK docstrings shown below.

examples/extensions/huggingface/huggingface_agent.py

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ def __call__(self):
 
 agent = IBMGenAIAgent(
     client=client,
-    model="meta-llama/llama-3-70b-instruct",
+    model="meta-llama/llama-3-1-70b-instruct",
     parameters=TextGenerationParameters(min_new_tokens=10, max_new_tokens=200, random_seed=777, temperature=0),
     additional_tools=[BitcoinPriceFetcher()],
 )

examples/extensions/langchain/langchain_agent.py

Lines changed: 1 addition & 1 deletion
@@ -78,7 +78,7 @@ def _run(self, word: str, run_manager: Optional[CallbackManagerForToolRun] = Non
 client = Client(credentials=Credentials.from_env())
 llm = LangChainChatInterface(
     client=client,
-    model_id="meta-llama/llama-3-70b-instruct",
+    model_id="meta-llama/llama-3-1-70b-instruct",
     parameters=TextGenerationParameters(
         max_new_tokens=250, min_new_tokens=20, temperature=0, stop_sequences=["\nObservation"]
     ),
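For context, here is a minimal end-to-end sketch of how the updated LangChainChatInterface is typically constructed and called. It is not part of this commit; the import paths and the final invoke() call are assumptions based on the SDK layout implied by the file paths above and standard LangChain conventions.

from genai import Client, Credentials
from genai.extensions.langchain import LangChainChatInterface  # assumed import path
from genai.schema import TextGenerationParameters  # assumed import path
from langchain_core.messages import HumanMessage

# Credentials.from_env() reads the API key/endpoint from environment variables.
client = Client(credentials=Credentials.from_env())
llm = LangChainChatInterface(
    client=client,
    model_id="meta-llama/llama-3-1-70b-instruct",  # the id introduced by this commit
    parameters=TextGenerationParameters(max_new_tokens=250, min_new_tokens=20, temperature=0),
)
# LangChain chat models return a message object; .content holds the generated text.
print(llm.invoke([HumanMessage(content="Say hello in one sentence.")]).content)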

examples/extensions/langchain/langchain_chat_generate.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ def heading(text: str) -> str:
 
 llm = LangChainChatInterface(
     client=Client(credentials=Credentials.from_env()),
-    model_id="meta-llama/llama-3-70b-instruct",
+    model_id="meta-llama/llama-3-1-70b-instruct",
     parameters=TextGenerationParameters(
         decoding_method=DecodingMethod.SAMPLE,
         max_new_tokens=100,

examples/extensions/langchain/langchain_chat_stream.py

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ def heading(text: str) -> str:
 print(heading("Stream chat with langchain"))
 
 llm = LangChainChatInterface(
-    model_id="meta-llama/llama-3-70b-instruct",
+    model_id="meta-llama/llama-3-1-70b-instruct",
     client=Client(credentials=Credentials.from_env()),
     parameters=TextGenerationParameters(
         decoding_method=DecodingMethod.SAMPLE,

examples/extensions/langchain/langchain_sql_agent.py

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ def create_llm():
     client = Client(credentials=Credentials.from_env())
     return LangChainChatInterface(
         client=client,
-        model_id="meta-llama/llama-3-70b-instruct",
+        model_id="meta-llama/llama-3-1-70b-instruct",
         parameters=TextGenerationParameters(
             max_new_tokens=250, min_new_tokens=20, temperature=0, stop_sequences=["\nObservation"]
         ),

examples/extensions/llama_index/llama_index_llm.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ def heading(text: str) -> str:
 
 llm = IBMGenAILlamaIndex(
     client=client,
-    model_id="meta-llama/llama-3-70b-instruct",
+    model_id="meta-llama/llama-3-1-70b-instruct",
     parameters=TextGenerationParameters(
         decoding_method=DecodingMethod.SAMPLE,
         max_new_tokens=100,
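As a side note, a minimal sketch of how the LlamaIndex wrapper above is typically used after this change; it is not part of the commit, and the import paths and the complete() call are assumptions based on the file names in this diff and the standard LlamaIndex LLM interface.

from genai import Client, Credentials
from genai.extensions.llama_index import IBMGenAILlamaIndex  # assumed import path
from genai.schema import DecodingMethod, TextGenerationParameters  # assumed import path

client = Client(credentials=Credentials.from_env())
llm = IBMGenAILlamaIndex(
    client=client,
    model_id="meta-llama/llama-3-1-70b-instruct",
    parameters=TextGenerationParameters(decoding_method=DecodingMethod.SAMPLE, max_new_tokens=100),
)
# LlamaIndex LLMs expose complete(); the returned object carries the text in .text.
print(llm.complete("What is a quantum computer?").text)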

examples/text/chat.py

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ def heading(text: str) -> str:
 )
 
 client = Client(credentials=Credentials.from_env())
-model_id = "meta-llama/llama-3-70b-instruct"
+model_id = "meta-llama/llama-3-1-70b-instruct"
 
 prompt = "What is NLP and how it has evolved over the years?"
 print(heading("Generating a chat response"))
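The hunk above only renames the model_id variable; a hedged sketch of how that variable typically reaches the chat endpoint follows, mirroring the docstring example in chat_generation_service.py further down. Import paths are assumptions, and the response is printed as-is because its exact schema is not shown in this diff.

from genai import Client, Credentials
from genai.schema import HumanMessage, TextGenerationParameters  # assumed import path

client = Client(credentials=Credentials.from_env())
model_id = "meta-llama/llama-3-1-70b-instruct"
response = client.text.chat.create(
    model_id=model_id,
    messages=[HumanMessage(content="What is NLP and how it has evolved over the years?")],
    parameters=TextGenerationParameters(max_token_limit=100),
)
print(response)  # exact response schema is not shown in this diff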

src/genai/extensions/langchain/chat_llm.py

Lines changed: 1 addition & 1 deletion
@@ -93,7 +93,7 @@ class LangChainChatInterface(BaseChatModel):
     client = Client(credentials=Credentials.from_env())
     llm = LangChainChatInterface(
         client=client,
-        model_id="meta-llama/llama-3-70b-instruct",
+        model_id="meta-llama/llama-3-1-70b-instruct",
         parameters=TextGenerationParameters(
             max_new_tokens=250,
         )

src/genai/extensions/langchain/llm.py

Lines changed: 1 addition & 1 deletion
@@ -62,7 +62,7 @@ class LangChainInterface(LLM):
     client = Client(credentials=Credentials.from_env())
     llm = LangChainInterface(
         client=client,
-        model_id="meta-llama/llama-3-70b-instruct",
+        model_id="meta-llama/llama-3-1-70b-instruct",
         parameters=TextGenerationParameters(max_new_tokens=50)
     )

src/genai/text/chat/chat_generation_service.py

Lines changed: 2 additions & 2 deletions
@@ -79,7 +79,7 @@ def create(
 
     # Create a new conversation
     response = client.text.chat.create(
-        model_id="meta-llama/llama-3-70b-instruct",
+        model_id="meta-llama/llama-3-1-70b-instruct",
         messages=[HumanMessage(content="Describe the game Chess?")],
        parameters=TextGenerationParameters(max_token_limit=100)
     )

@@ -150,7 +150,7 @@ def create_stream(
 
     # Create a new conversation
     for response in client.text.chat.create_stream(
-        model_id="meta-llama/llama-3-70b-instruct",
+        model_id="meta-llama/llama-3-1-70b-instruct",
         messages=[HumanMessage(content="Describe the game Chess?")],
        parameters=TextGenerationParameters(max_token_limit=100)
     ):
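To round out the docstring above, a minimal sketch of consuming the create_stream() generator with the updated model id. It is not part of this commit, the import paths are assumptions, and each chunk is printed whole because the streaming response schema is not shown here.

from genai import Client, Credentials
from genai.schema import HumanMessage, TextGenerationParameters  # assumed import path

client = Client(credentials=Credentials.from_env())
for chunk in client.text.chat.create_stream(
    model_id="meta-llama/llama-3-1-70b-instruct",
    messages=[HumanMessage(content="Describe the game Chess?")],
    parameters=TextGenerationParameters(max_token_limit=100),
):
    print(chunk)  # each chunk carries a partial generation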
