openai



API

Local

import requests
import json
import pyttsx3
 
# gemma-3-1b-it, qwen3-14b, openai-7b-v0.1
MODEL_NAME = "openai-7b-v0.1"
API_URL = "http://127.0.0.1:8080"
 
def frage_gpt(prompt_text):
    try:
        response = requests.post(
            API_URL + "/v1/chat/completions",
            headers={"Content-Type": "application/json"},
            data=json.dumps({
                "model": MODEL_NAME,
                "messages": [
                    {"role": "system", "content": "Du bist ein hilfreicher Assistent."},
                    {"role": "user", "content": prompt_text}
                ],
                "stream": False
            })
        )
        if response.status_code == 200:
            return response.json() # response.text
        else:
            return f"Fehler: {response.status_code} - {response.text}"
    except Exception as e:
        return f"Fehler: {e}"
 
def frage_gpt_stream(prompt_text):
    try:
        response = requests.post(
            API_URL + "/v1/chat/completions",
            headers={"Content-Type": "application/json"},
            data=json.dumps({
                "model": MODEL_NAME,
                "messages": [
                    {"role": "assistant", "content": "Du bist ein hilfreicher Assistent."},
                    {"role": "user", "content": prompt_text}
                ],
                "stream": True
            }),
            stream=True  # enables HTTP streaming of the response
        )
 
        if response.status_code != 200:
            print(f"Fehler: {response.status_code} - {response.text}")
            return
 
        # Process the response stream line by line (server-sent events)
        for line in response.iter_lines():
            if line:
                decoded_line = line.decode("utf-8")
                if decoded_line.startswith("data: "):
                    data = decoded_line[6:]
                    if data.strip() == "[DONE]":  # end-of-stream marker sent by OpenAI-compatible servers
                        break
                    payload = json.loads(data)
                    delta = payload.get("choices", [{}])[0].get("delta", {}).get("content")
                    if delta:
                        print(delta, end="", flush=True)
        print()  # newline after the streamed answer
 
    except Exception as e:
        print(f"Fehler: {e}")
 
 
engine = pyttsx3.init()
engine.setProperty("rate", 150)
engine.setProperty("volume", 1.0)
voices = engine.getProperty("voices")
for voice in voices:
    if "german" in voice.name.lower():
        engine.setProperty("voice", voice.id)
        break
 
 
while True:
    frage = input("Frage: ")
    frage_gpt_stream(frage)
    # Non-streaming alternative with spoken output via pyttsx3:
    #antwort = frage_gpt(frage)
    #print(antwort["choices"][0]["message"]["content"])
    #engine.say(antwort["choices"][0]["message"]["content"])
    #engine.runAndWait()
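
The local endpoint speaks the OpenAI chat-completions format, so the same server can also be used through the openai Python package instead of raw requests. The following is only a minimal sketch, assuming an OpenAI-compatible local server (for example llama.cpp's llama-server) on port 8080 and the legacy openai<1.0 package that the later examples on this page use; local servers usually ignore the API key, so the value below is just a placeholder.

import openai
 
# Point the legacy openai<1.0 client at the local server
# (assumption: the server exposes the /v1 routes used above)
openai.api_base = "http://127.0.0.1:8080/v1"
openai.api_key = "local"  # placeholder, typically ignored by local servers
 
response = openai.ChatCompletion.create(
    model="openai-7b-v0.1",  # model name as loaded by the local server
    messages=[
        {"role": "system", "content": "Du bist ein hilfreicher Assistent."},
        {"role": "user", "content": "Hallo!"}
    ]
)
print(response["choices"][0]["message"]["content"])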

Chat Completion

import openai
import os
 
# Insert the API key here or read it from an environment variable
openai.api_key = os.getenv("OPENAI_API_KEY") or "sk-xxx..."
 
def frage_gpt(prompt_text):
    try:
        response = openai.ChatCompletion.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": "Du bist ein hilfreicher Assistent."},
                {"role": "user", "content": prompt_text}
            ],
            temperature=0.7,
            max_tokens=500
        )
        antwort = response['choices'][0]['message']['content']
        return antwort.strip()
    except Exception as e:
        return f"Fehler: {e}"
 
# Example
frage = input("Frage: ")
antwort = frage_gpt(frage)
print("Antwort: ", antwort)

Chat Completion with History

import openai
import os
 
openai.api_key = os.getenv("OPENAI_API_KEY") or "sk-xxxxx"
 
chat_history = [
    {"role": "system", "content": "Du bist ein hilfreicher Assistent."}
]
 
def frage_gpt_mit_verlauf(chat_history):
    try:
        response = openai.ChatCompletion.create(
            model="gpt-4",
            messages=chat_history,
            temperature=0.7,
            max_tokens=500
        )
        antwort = response['choices'][0]['message']['content']
        return antwort.strip()
    except Exception as e:
        return f"Fehler: {e}"
 
def frage_gpt_stream_mit_verlauf(chat_history):
    try:
        response = openai.ChatCompletion.create(
            model="gpt-4",
            messages=chat_history,
            temperature=0.7,
            max_tokens=500,
            stream=True
        )
 
        full_answer = ""
        for chunk in response:
            if 'choices' in chunk and len(chunk['choices']) > 0:
                delta = chunk['choices'][0]['delta']
                content = delta.get("content", "")
                print(content, end="", flush=True)
                full_answer += content
        print()
        return full_answer.strip()
 
    except Exception as e:
        return f"Fehler: {e}"
 
def main():
    while True:
        frage = input("Frage: ").strip()
        if frage.lower() in ['exit', 'quit']:
            print("Chat beendet.")
            break
 
        chat_history.append({"role": "user", "content": frage})
 
        antwort = frage_gpt_stream_mit_verlauf(chat_history)
        #antwort = frage_gpt_mit_verlauf(chat_history)  # use this if you prefer the non-streaming variant
 
        chat_history.append({"role": "assistant", "content": antwort})
 
if __name__ == "__main__":
    main()
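
Because every question and answer is appended, chat_history grows without bound and will eventually exceed the model's context window. A small, hypothetical helper (not part of the original example) that keeps the system prompt plus only the last few turns could look like this:

def trimme_verlauf(chat_history, max_turns=10):
    """Keep the system message(s) plus the last max_turns user/assistant pairs."""
    system = [m for m in chat_history if m["role"] == "system"]
    rest = [m for m in chat_history if m["role"] != "system"]
    return system + rest[-2 * max_turns:]  # one turn = user message + assistant message

Passing trimme_verlauf(chat_history) to the request functions keeps the prompt size roughly constant while the full history stays intact in memory.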

Function Calling

import openai
import os
import json
import requests
 
# Insert the API key here or read it from an environment variable
openai.api_key = os.getenv("OPENAI_API_KEY") or "sk-...."
 
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get current temperature for a given location.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City and country e.g. Paris, France"
                    }
                },
                "required": ["location"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "search_web",
            "description": "Search the internet for information on a given topic.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Search query, e.g. 'current weather in Paris'"
                    }
                },
                "required": ["query"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "list_files",
            "description": "Listet alle Dateien in einem angegebenen Verzeichnis.",
            "parameters": {
                "type": "object",
                "properties": {
                    "directory": {
                        "type": "string",
                        "description": "Pfad zum Verzeichnis, z. B. '/home/user/docs'"
                    }
                },
                "required": ["directory"]
            }
        }
    }
 
]
 
messages = [{"role": "user", "content": "Welche Dateien findest du in C:\\Users\\manuel.zarat\\Desktop\\ncat ? Verwende das Tool list_files."}]
response = openai.ChatCompletion.create(
    model="gpt-4-0613",  # Modell muss Function Calling unterstützen
    messages=messages,
    tools=tools,
    tool_choice="auto"
)
 
response_message = response['choices'][0]['message']
tool_calls = response_message.get("tool_calls", [])
 
def get_weather(location):
    return f"The current temperature in {location} is 23 degrees and sunny."
 
def list_files(directory_path):
    try:
        files = os.listdir(directory_path)
        return [f for f in files if os.path.isfile(os.path.join(directory_path, f))]
    except Exception as e:
        return f"Fehler beim Zugriff auf das Verzeichnis: {e}"
 
def search_web(query):
    # Dummy implementation so the search_web branch below does not fail
    return f"Suchergebnisse für '{query}' sind derzeit nicht verfügbar (Dummy-Funktion)."
 
if tool_calls:
    for call in tool_calls:
        function_name = call['function']['name']
        arguments = json.loads(call['function']['arguments'])
 
        if function_name == "list_files":
            location = arguments['directory']
            result = list_files(location)
            messages.append(response_message)
            messages.append({
                "role": "tool",
                "tool_call_id": call['id'],
                "name": function_name,
                "content": result if isinstance(result, str) else "\n".join(result)
            })
            followup = openai.ChatCompletion.create(
                model="gpt-4-0613",
                messages=messages
            )
            print(followup['choices'][0]['message']['content'])
 
        # Execute the matching function
        if function_name == "get_weather":
            location = arguments['location']
            result = get_weather(location)
            messages.append(response_message)  
            messages.append({
                "role": "tool",
                "tool_call_id": call['id'],
                "name": function_name,
                "content": result
            })
            followup = openai.ChatCompletion.create(
                model="gpt-4-0613",
                messages=messages
            )
            print(followup['choices'][0]['message']['content'])
 
        if function_name == "search_web":
            query = arguments['query']
            result = search_web(query)
            messages.append(response_message)
            messages.append({
                "role": "tool",
                "tool_call_id": call['id'],
                "name": function_name,
                "content": result
            })
            followup = openai.ChatCompletion.create(
                model="gpt-4-0613",
                messages=messages
            )
            print(followup['choices'][0]['message']['content'])
 
else:
    # No tool was called; print the normal answer
    print(response_message['content'])
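
The three nearly identical if-branches above can be collapsed into a lookup table. This is only a hypothetical refactoring sketch; it assumes the tool functions get_weather, list_files and search_web defined in the example above, and the helper name fuehre_tool_aus is made up for illustration:

# Hypothetical dispatch table; keys match the tool definitions above,
# the handlers are the functions defined in the example.
TOOL_HANDLERS = {
    "get_weather": lambda args: get_weather(args["location"]),
    "search_web": lambda args: search_web(args["query"]),
    "list_files": lambda args: list_files(args["directory"]),
}
 
def fuehre_tool_aus(call):
    """Run a single tool call and return a string suitable for a 'tool' message."""
    handler = TOOL_HANDLERS.get(call["function"]["name"])
    if handler is None:
        return f"Unbekanntes Tool: {call['function']['name']}"
    result = handler(json.loads(call["function"]["arguments"]))
    return result if isinstance(result, str) else "\n".join(result)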

Chat History with Tools

import openai
import os
import json
 
# Set the API key
openai.api_key = os.getenv("OPENAI_API_KEY") or "sk-...."
 
# Tool definitions
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get current temperature for a given location.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City and country e.g. Paris, France"
                    }
                },
                "required": ["location"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "search_web",
            "description": "Search the internet for information on a given topic.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Search query, e.g. 'current weather in Paris'"
                    }
                },
                "required": ["query"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "list_files",
            "description": "Listet alle Dateien in einem angegebenen Verzeichnis.",
            "parameters": {
                "type": "object",
                "properties": {
                    "directory": {
                        "type": "string",
                        "description": "Pfad zum Verzeichnis, z. B. '/home/user/docs'"
                    }
                },
                "required": ["directory"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "read_file",
            "description": "Liest den Inhalt einer Datei aus.",
            "parameters": {
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Pfad zur Datei, z. B. 'C:/Users/user/Desktop/info.txt'"
                    }
                },
                "required": ["path"]
            }
        }
    }
 
]
 
# Tool functions
def get_weather(location):
    return f"The current temperature in {location} is 23 degrees and sunny."
 
def list_files(directory_path):
    try:
        files = os.listdir(directory_path)
        return [f for f in files if os.path.isfile(os.path.join(directory_path, f))]
    except Exception as e:
        return f"Fehler beim Zugriff auf das Verzeichnis: {e}"
 
def search_web(query):
    return f"Suchergebnisse für '{query}' sind derzeit nicht verfügbar (Dummy-Funktion)."
 
def read_file(path):
    try:
        with open(path, 'r', encoding='utf-8') as f:
            content = f.read()
        return content
    except Exception as e:
        return f"Fehler beim Lesen der Datei: {e}"
 
 
# GPT interaction with tool support
def frage_gpt_mit_tools(chat_history):
    try:
        response = openai.ChatCompletion.create(
            model="gpt-4-0613",
            messages=chat_history,
            tools=tools,
            tool_choice="auto"
        )
 
        response_message = response['choices'][0]['message']
        tool_calls = response_message.get("tool_calls", [])
 
        if tool_calls:
            # Append the assistant message (with its tool calls) once,
            # then add one tool result message per call
            chat_history.append(response_message)

            for call in tool_calls:
                function_name = call['function']['name']
                arguments = json.loads(call['function']['arguments'])

                if function_name == "list_files":
                    result = list_files(arguments['directory'])

                elif function_name == "get_weather":
                    result = get_weather(arguments['location'])

                elif function_name == "search_web":
                    result = search_web(arguments['query'])

                elif function_name == "read_file":
                    result = read_file(arguments['path'])

                else:
                    result = f"Unbekanntes Tool: {function_name}"

                chat_history.append({
                    "role": "tool",
                    "tool_call_id": call['id'],
                    "name": function_name,
                    "content": result if isinstance(result, str) else "\n".join(result)
                })
 
            followup = openai.ChatCompletion.create(
                model="gpt-4-0613",
                messages=chat_history
            )
            antwort = followup['choices'][0]['message']['content']
        else:
            antwort = response_message['content']
 
        return antwort.strip()
 
    except Exception as e:
        return f"Fehler: {e}"
 
# Main function: interactive chat loop
def main():
    chat_history = [
        {"role": "system", "content": "Du bist ein hilfreicher Assistent."}
    ]
 
    while True:
        frage = input("Frage: ").strip()
        if frage.lower() in ['exit', 'quit']:
            print("Chat beendet.")
            break
 
        chat_history.append({"role": "user", "content": frage})
        antwort = frage_gpt_mit_tools(chat_history)
        print(antwort)
        chat_history.append({"role": "assistant", "content": antwort})
 
if __name__ == "__main__":
    main()
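
If the conversation should survive a restart, the history can be written to disk after each turn. A minimal sketch, assuming the file name chat_history.json (an arbitrary choice) and that the history only contains plain dicts or the dict-like message objects returned by the library:

import json
 
HISTORY_FILE = "chat_history.json"  # assumed file name, adjust as needed
 
def lade_verlauf():
    """Load a saved history, or start a fresh one with the system prompt."""
    try:
        with open(HISTORY_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        return [{"role": "system", "content": "Du bist ein hilfreicher Assistent."}]
 
def speichere_verlauf(chat_history):
    """Write the history back to disk after each turn."""
    with open(HISTORY_FILE, "w", encoding="utf-8") as f:
        json.dump(chat_history, f, ensure_ascii=False, indent=2)

In main(), chat_history = lade_verlauf() would replace the hard-coded start list, and speichere_verlauf(chat_history) would be called after each assistant answer.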