Python examples
You can conveniently access the models from programming languages via existing libraries that support the OpenAI API, so mittwald's AI hosting can often serve as a drop-in replacement.
For the following examples, first install the required libraries using a Python package manager and store the API key generated in mStudio in a .env file:
pip install python-dotenv openai langchain-openai
echo 'OPENAI_API_KEY="sk-…"' > .env
Then, you can access a model using the OpenAI package:
from openai import OpenAI
from dotenv import load_dotenv

# Pull OPENAI_API_KEY out of the .env file into the process environment.
load_dotenv()

# The OpenAI client reads OPENAI_API_KEY automatically; only the host is custom.
client = OpenAI(base_url="https://llm.aihosting.mittwald.de/v1")

# Send a minimal single-turn chat request.
completion = client.chat.completions.create(
    model="Ministral-3-14B-Instruct-2512",
    temperature=0.15,
    messages=[{"role": "user", "content": "Moin and hello!"}],
)

print(completion.choices[0].message.content)
Alternatively, you can use LangChain:
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage

# Pull OPENAI_API_KEY out of the .env file into the process environment.
load_dotenv()

# Point the LangChain chat model at the custom host; the API key is taken
# from the environment.
llm = ChatOpenAI(
    model="Ministral-3-14B-Instruct-2512",
    base_url="https://llm.aihosting.mittwald.de/v1",
    temperature=0.15,
)

# Invoke the model with a single human message and print the reply text.
answer = llm.invoke([HumanMessage(content="Moin and hello!")])
print(answer.content)
Vision (image + text)
from openai import OpenAI

client = OpenAI(base_url="https://llm.aihosting.mittwald.de/v1")

# Multimodal request: the message content is a list mixing one text part
# and one image-URL part.
vision_resp = client.chat.completions.create(
    model="Ministral-3-14B-Instruct-2512",
    temperature=0.1,
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "Describe this image succinctly."},
                {
                    "type": "image_url",
                    "image_url": {"url": "https://example.com/sample.jpg"},
                },
            ],
        }
    ],
)

print(vision_resp.choices[0].message.content)
Notes: up to 4 images per request recommended; prefer square-ish aspect ratios.
Tool-calling (function calling)
from openai import OpenAI

client = OpenAI(base_url="https://llm.aihosting.mittwald.de/v1")

# JSON-schema description of the single tool the model is allowed to call.
weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get current weather",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}

# tool_choice="auto" lets the model decide whether to answer directly
# or emit a tool call.
resp = client.chat.completions.create(
    model="Devstral-Small-2-24B-Instruct-2512",
    messages=[{"role": "user", "content": "What is the weather in Berlin?"}],
    tools=[weather_tool],
    tool_choice="auto",
)

# If the model chose to call a tool, show the first call's name and
# its JSON-encoded arguments.
tool_calls = resp.choices[0].message.tool_calls
if tool_calls:
    call = tool_calls[0]
    print(f"Function: {call.function.name}")
    print(f"Arguments: {call.function.arguments}")
Streaming responses
from openai import OpenAI

client = OpenAI(base_url="https://llm.aihosting.mittwald.de/v1")

# stream=True makes the API yield incremental delta chunks instead of
# one final message.
stream = client.chat.completions.create(
    model="Devstral-Small-2-24B-Instruct-2512",
    messages=[{"role": "user", "content": "Write a short poem about coding"}],
    stream=True,
)

for chunk in stream:
    # Some chunks carry an empty `choices` list (e.g. a trailing usage
    # chunk), and a delta's `content` may be None — guard both before
    # printing to avoid an IndexError / printing "None".
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="", flush=True)

# Terminate the streamed output with a newline.
print()