import ollama

# Step 1: Pull the model so it is available locally.
ollama.pull(model='llama2')

# Step 2: Ask the model a question in a single-turn chat.
chat_messages = [{'role': 'user', 'content': 'Why is the sky blue?'}]
response = ollama.chat(model='llama2', messages=chat_messages)
print(response)
# --- Variant 2: pull the model with retry logic ---
import ollama
import time
# Function to pull the model with retries
def pull_model_with_retries(model_name, retries=3, delay=5):
    """Pull an Ollama model, retrying on failure.

    Args:
        model_name: Name of the model to pull (e.g. 'llama2').
        retries: Maximum number of pull attempts (default 3).
        delay: Seconds to wait between attempts (default 5).

    Returns:
        True if the model was pulled successfully, False if every
        attempt failed.
    """
    for attempt in range(retries):
        try:
            print(f"Pulling model '{model_name}', attempt {attempt + 1}")
            ollama.pull(model=model_name)
            print(f"Successfully pulled model '{model_name}'")
            return True
        except Exception as e:
            # Broad catch is deliberate: any pull failure (network error,
            # unknown model, server down) should trigger a retry, not a crash.
            print(f"Failed to pull model '{model_name}' on attempt {attempt + 1}: {e}")
            # Only sleep when another attempt remains.
            if attempt < retries - 1:
                print(f"Retrying in {delay} seconds...")
                time.sleep(delay)
    print(f"Failed to pull model '{model_name}' after {retries} attempts")
    return False
# Attempt to pull the model with retries; only chat if the pull succeeded.
if pull_model_with_retries('llama2'):
    # If successful, proceed with the chat.
    response = ollama.chat(model='llama2', messages=[
        {
            'role': 'user',
            'content': 'Why is the sky blue?',
        },
    ])
    print(response)
else:
    print("Unable to pull model 'llama2'. Please check your connection and try again.")
# --- Variant 3: fall back through a list of smaller models ---
import ollama
# List of potential smaller models (example names)
# List of potential smaller models to try, in order of preference.
# NOTE(review): these are example names — verify they exist in the Ollama
# registry before relying on them.
models = ['llama2-small', 'llama2-mini', 'llama2-base']

# Try each candidate in turn; stop at the first one that pulls successfully.
for model in models:
    try:
        print(f"Attempting to pull model: {model}")
        ollama.pull(model=model)
        print(f"Successfully pulled model: {model}")
        # Use the freshly pulled model for a one-question chat.
        response = ollama.chat(model=model, messages=[
            {
                'role': 'user',
                'content': 'Why is the sky blue?',
            },
        ])
        print(response)
        break
    except Exception as e:
        # A failed pull (or chat) simply moves on to the next candidate.
        print(f"Failed to pull model {model}: {e}")
print("Finished pulling models.")
# (Blog page footer, not code — original Spanish: "No hay comentarios: /
#  Publicar un comentario", i.e. "No comments: / Post a comment")