# Function Calling

Download and install Ollama from https://ollama.com, then pull the base model with `ollama pull llama3`.

# Installation

  • pip install gradio
  • pip install llama-index
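
On llama-index 0.10 and newer, the Ollama LLM integration ships as a separate package, so you will likely also need:

  • pip install llama-index-llms-ollama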

# Modelfile

```
# Modelfile generated by "ollama show"
# To build a new Modelfile based on this one, replace the FROM line with:
# FROM llama3:latest

FROM C:\Users\ISAC\.ollama\models\blobs\sha256-00e1317cbf74d901080d7100f57580ba8dd8de57203072dc6f668324ba545f29
TEMPLATE """{{ if .System }}<|start_header_id|>system<|end_header_id|>

{{ .System }}<|eot_id|>{{ end }}{{ if .Prompt }}<|start_header_id|>user<|end_header_id|>

{{ .Prompt }}<|eot_id|>{{ end }}<|start_header_id|>assistant<|end_header_id|>

{{ .Response }}<|eot_id|>"""
PARAMETER num_keep 24
PARAMETER stop "<|start_header_id|>"
PARAMETER stop "<|end_header_id|>"
PARAMETER stop "<|eot_id|>"
PARAMETER num_thread 24
PARAMETER temperature 0
```
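
Save this as `Modelfile` and register it with Ollama so the code below can load it by name; the code references `llama3:zerotempactions`, so run `ollama create llama3:zerotempactions -f Modelfile` from the directory containing the file.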

# Code: LLM function calling
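
The script below wires everything together: a single Python function (`open_cmd`) is wrapped as a llama-index `FunctionTool`, handed to a `ReActAgent` backed by the custom `llama3:zerotempactions` model, and exposed through a Gradio chat interface.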

```python
import subprocess

import gradio as gr
from llama_index.core.agent import ReActAgent
from llama_index.core.tools import FunctionTool
from llama_index.llms.ollama import Ollama


def open_cmd(action: str) -> str:
    """Opens a new cmd window (Windows only). The action argument is ignored."""
    try:
        subprocess.Popen("start cmd.exe", shell=True)
        return "Opened a new cmd window."
    except Exception as e:
        return f"error: {e}"


cmd_tool = FunctionTool.from_defaults(fn=open_cmd)


def agent_chat(user_input: str, history) -> str:
    """Runs the user's request through the ReAct agent and returns its reply."""
    llm = Ollama(model="llama3:zerotempactions", request_timeout=600.0)
    # Alternative custom model built with num_thread 24:
    # llm = Ollama(model="increasedthreads:24", request_timeout=600.0)
    agent = ReActAgent.from_tools([cmd_tool], llm=llm, verbose=True)

    response = str(agent.chat(str(user_input)))
    print(f"response: {response}")
    return response


# Standalone test without the Gradio UI:
# llm = Ollama(model="llama3:zerotempactions", request_timeout=600.0)
# agent = ReActAgent.from_tools([cmd_tool], llm=llm, verbose=True)
# response = agent.chat("Open this youtube url https://www.youtube.com/watch?v=dQw4w9WgXcQ , then open the txt file named 'Adam first action.txt' ")
# print(response)

iface = gr.ChatInterface(
    agent_chat,
    textbox=gr.Textbox(
        placeholder="Let me help you, what do you want me to do?",
        container=False,
        scale=7,
    ),
    title="Action Executor",
    description="Actions made easy!",
).launch(server_name="0.0.0.0", server_port=8889)
```
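
The commented-out test above asks the agent to open a URL and a text file, which the single `open_cmd` tool cannot do on its own. Below is a minimal sketch of two additional tools that would cover that request using only the standard library; `open_url` and `open_file` are hypothetical names not in the original code:

```python
import os
import webbrowser

from llama_index.core.tools import FunctionTool


def open_url(url: str) -> str:
    """Opens a URL in the system's default browser."""
    webbrowser.open(url)
    return f"Opened {url} in the browser."


def open_file(path: str) -> str:
    """Opens a file with its associated application (Windows only, like open_cmd)."""
    os.startfile(path)
    return f"Opened {path}."


url_tool = FunctionTool.from_defaults(fn=open_url)
file_tool = FunctionTool.from_defaults(fn=open_file)

# Register them alongside cmd_tool when building the agent:
# agent = ReActAgent.from_tools([cmd_tool, url_tool, file_tool], llm=llm, verbose=True)
```

Because `FunctionTool.from_defaults` derives each tool's schema from the function signature and docstring, keeping the docstrings short and action-oriented helps the ReAct agent pick the right tool.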