llm_chatting

Chat with LLM models

SDK Method

async def llm_chatting(model, messages, max_tokens=4096,
                       temperature=0.7, top_p=0.9, top_k=0,
                       repetition_penalty=1, presence_penalty=0,
                       frequency_penalty=0) ->
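
All arguments after model and messages are optional and default to the values shown above. As a rough sketch (the model name and parameter values below are illustrative, not recommendations), a call that overrides the sampling defaults could look like this:

from VisionCraftAPI import VisionCraftClient

async def main() -> None:
    client = VisionCraftClient(api_key="YOUR_API_KEY")

    # Illustrative values only; every keyword argument below is optional.
    answer = await client.llm_chatting(
        model="gemma-7b",            # same model as the text example below
        messages=[{"role": "user", "content": "Write one sentence about the sea."}],
        max_tokens=256,              # cap the length of the reply
        temperature=0.3,             # lower temperature gives more deterministic output
        top_p=0.9,
        repetition_penalty=1.1,
    )
    print(answer.content)

if __name__ == "__main__":
    import asyncio
    asyncio.run(main())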

Usage example (text only)

from VisionCraftAPI import VisionCraftClient

async def main() -> None:
    
    api_key = "YOUR_API_KEY"
    client = VisionCraftClient(api_key=api_key)
    
    messages = [
        {
            "role": "user",
            "content": """There are 50 books in a library. 
            Sam decides to read 5 of the books. How many books are there now? 
            If there are 45 books, say "1". 
            Else, if there is the same number of books, say "2"."""
        },
    ]
    
    answer = await client.llm_chatting(model="gemma-7b",
                                       messages=messages)
    
    print(answer.role)
    print(answer.content)
    
if __name__ == "__main__":
    import asyncio
    asyncio.run(main())
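
Because messages is a plain list of role/content dictionaries, a conversation can be continued by appending the returned answer to the history before the next call. The sketch below assumes the endpoint accepts assistant-role messages in the history (the standard chat format); the model name is the one used above:

from VisionCraftAPI import VisionCraftClient

async def main() -> None:
    client = VisionCraftClient(api_key="YOUR_API_KEY")

    messages = [{"role": "user", "content": "Name one planet of the Solar System."}]
    first = await client.llm_chatting(model="gemma-7b", messages=messages)

    # Feed the model's reply back into the history, then ask a follow-up question.
    messages.append({"role": first.role, "content": first.content})
    messages.append({"role": "user", "content": "Why is it classified as a planet?"})

    second = await client.llm_chatting(model="gemma-7b", messages=messages)
    print(second.content)

if __name__ == "__main__":
    import asyncio
    asyncio.run(main())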

Usage example (with image)

import base64
import aiohttp

from VisionCraftAPI import VisionCraftClient

async def main() -> None:
    
    api_key = "YOUR_API_KEY"
    client = VisionCraftClient(api_key=api_key)
    
    async with aiohttp.ClientSession() as session:
        async with session.get("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg") as response:
            base64_image = base64.b64encode(await response.read()).decode("utf-8")
    
    messages = [{
        "role": "user",
        "content": [
            {
                "type": "text",
                "text": "What’s in this image?"
            },
            {
                "type": "image_url",
                "image_url": {
                    "url": f"data:image/jpeg;base64,{base64_image}"
                }
            }
        ]
    }]
    
    answer = await client.llm_chatting(model="llava-1.5-7b-hf",
                                       messages=messages)
    
    print(answer.role)
    print(answer.content)
    
if __name__ == "__main__":
    import asyncio
    asyncio.run(main())
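
The image does not have to be fetched over HTTP: a local JPEG can be encoded into the same base64 data URL. The sketch below assumes a local file photo.jpg exists next to the script (the path is a placeholder); everything else mirrors the example above:

import base64

from VisionCraftAPI import VisionCraftClient

async def main() -> None:
    client = VisionCraftClient(api_key="YOUR_API_KEY")

    # Read a local image and build the same data-URL payload as above
    # ("photo.jpg" is a placeholder path).
    with open("photo.jpg", "rb") as image_file:
        base64_image = base64.b64encode(image_file.read()).decode("utf-8")

    messages = [{
        "role": "user",
        "content": [
            {"type": "text", "text": "Describe this image in one sentence."},
            {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}}
        ]
    }]

    answer = await client.llm_chatting(model="llava-1.5-7b-hf", messages=messages)
    print(answer.content)

if __name__ == "__main__":
    import asyncio
    asyncio.run(main())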

For further information, go to the API Docs or see the examples on OpenAI.
