<?xml version="1.0" encoding="utf-8"?>
<helpItems schema="maml" xmlns="http://msh">
  <command:command xmlns:maml="http://schemas.microsoft.com/maml/2004/10" xmlns:command="http://schemas.microsoft.com/maml/dev/command/2004/10" xmlns:dev="http://schemas.microsoft.com/maml/dev/2004/10" xmlns:MSHelp="http://msdn.microsoft.com/mshelp">
    <command:details>
      <command:name>Get-OpenAILogs</command:name>
      <command:verb>Get</command:verb>
      <command:noun>OpenAILogs</command:noun>
      <maml:description>
        <maml:para>Get logs of the OpenAI module</maml:para>
      </maml:description>
    </command:details>
    <maml:description>
      <maml:para>Get logs of all prompts and execution details, such as time, duration, and token usage</maml:para>
    </maml:description>
    <command:syntax>
      <command:syntaxItem>
        <maml:name>Get-OpenAILogs</maml:name>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>all</maml:name>
          <maml:description>
            <maml:para>Get all logs instead of only today's log.</maml:para>
          </maml:description>
          <dev:type>
            <maml:name>SwitchParameter</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>False</dev:defaultValue>
        </command:parameter>
      </command:syntaxItem>
    </command:syntax>
    <command:parameters>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>all</maml:name>
        <maml:description>
          <maml:para>Get all logs instead of only today's log.</maml:para>
        </maml:description>
        <command:parameterValue required="false" variableLength="false">SwitchParameter</command:parameterValue>
        <dev:type>
          <maml:name>SwitchParameter</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>False</dev:defaultValue>
      </command:parameter>
    </command:parameters>
    <command:inputTypes />
    <command:returnValues />
    <maml:alertSet>
      <maml:alert>
        <maml:para></maml:para>
      </maml:alert>
    </maml:alertSet>
    <command:examples>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 1 --------------------------</maml:title>
        <dev:code>Get-OpenAILogs</dev:code>
        <dev:remarks>
          <maml:para>Get today's log.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 2 --------------------------</maml:title>
        <dev:code>Get-OpenAILogs -all</dev:code>
        <dev:remarks>
          <maml:para>Get all logs</maml:para>
        </dev:remarks>
      </command:example>
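      <!-- Illustrative example: assumes the log entries are written to the pipeline; the file name is a placeholder. -->
      <command:example>
        <maml:title>-------------------------- EXAMPLE 3 --------------------------</maml:title>
        <dev:code>Get-OpenAILogs -all | Out-File .\openai-logs.txt</dev:code>
        <dev:remarks>
          <maml:para>Save a copy of all logs to a text file for later review.</maml:para>
        </dev:remarks>
      </command:example>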
    </command:examples>
    <command:relatedLinks>
      <maml:navigationLink>
        <maml:linkText>GitHub link</maml:linkText>
        <maml:uri>https://github.com/code365opensource/code365scripts/tree/master/code365scripts.openai</maml:uri>
      </maml:navigationLink>
    </command:relatedLinks>
  </command:command>
  <command:command xmlns:maml="http://schemas.microsoft.com/maml/2004/10" xmlns:command="http://schemas.microsoft.com/maml/dev/command/2004/10" xmlns:dev="http://schemas.microsoft.com/maml/dev/2004/10" xmlns:MSHelp="http://msdn.microsoft.com/mshelp">
    <command:details>
      <command:name>New-OpenAICompletion</command:name>
      <command:verb>New</command:verb>
      <command:noun>OpenAICompletion</command:noun>
      <maml:description>
        <maml:para>Call the completion API and return the result</maml:para>
      </maml:description>
    </command:details>
    <maml:description>
      <maml:para>The cmdlet supports both the original OpenAI service and the Azure OpenAI service</maml:para>
    </maml:description>
    <command:syntax>
      <command:syntaxItem>
        <maml:name>New-OpenAICompletion</maml:name>
        <command:parameter required="true" variableLength="true" globbing="false" pipelineInput="True (ByValue)" position="1" aliases="none">
          <maml:name>prompt</maml:name>
          <maml:description>
            <maml:para>Your prompt text. This is the only required parameter.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>api_key</maml:name>
          <maml:description>
            <maml:para>OpenAI API Key</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>azure</maml:name>
          <maml:description>
            <maml:para>Use the Azure OpenAI service instead of the original OpenAI service.</maml:para>
          </maml:description>
          <dev:type>
            <maml:name>SwitchParameter</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>False</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>endpoint</maml:name>
          <maml:description>
            <maml:para>OpenAI service endpoint. You can specify it by environment variable (OPENAI_ENDPOINT or OPENAI_ENDPOINT_AZURE).</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>engine</maml:name>
          <maml:description>
            <maml:para>The name of your model. You can specify it by environment variable (OPENAI_ENGINE or OPENAI_ENGINE_AZURE).</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>max_tokens</maml:name>
          <maml:description>
            <maml:para>The maximum number of tokens to generate in the completion. The default value is 1024. The limit varies by model, so please refer to the official documentation.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">Int32</command:parameterValue>
          <dev:type>
            <maml:name>Int32</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>1024</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>n</maml:name>
          <maml:description>
            <maml:para>The number of completions to generate for each prompt. The default value is 1.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">Int32</command:parameterValue>
          <dev:type>
            <maml:name>Int32</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>1</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>temperature</maml:name>
          <maml:description>
            <maml:para>What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">Double</command:parameterValue>
          <dev:type>
            <maml:name>Double</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>1</dev:defaultValue>
        </command:parameter>
      </command:syntaxItem>
    </command:syntax>
    <command:parameters>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>api_key</maml:name>
        <maml:description>
          <maml:para>OpenAI API key. You can specify it by environment variable (OPENAI_API_KEY or OPENAI_API_KEY_AZURE).</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>azure</maml:name>
        <maml:description>
          <maml:para>Use the Azure OpenAI service instead of the original OpenAI service.</maml:para>
        </maml:description>
        <command:parameterValue required="false" variableLength="false">SwitchParameter</command:parameterValue>
        <dev:type>
          <maml:name>SwitchParameter</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>False</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>endpoint</maml:name>
        <maml:description>
          <maml:para>OpenAI service endpoint. You can specify it by environment variable (OPENAI_ENDPOINT or OPENAI_ENDPOINT_AZURE).</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>engine</maml:name>
        <maml:description>
          <maml:para>The name of your model. You can specify it by environment variable (OPENAI_ENGINE or OPENAI_ENGINE_AZURE).</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>max_tokens</maml:name>
        <maml:description>
          <maml:para>The maximum number of tokens to generate in the completion. Default value is 1024.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">Int32</command:parameterValue>
        <dev:type>
          <maml:name>Int32</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>1024</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>n</maml:name>
        <maml:description>
          <maml:para>The number of completions to generate for each prompt. The default value is 1.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">Int32</command:parameterValue>
        <dev:type>
          <maml:name>Int32</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>1</dev:defaultValue>
      </command:parameter>
      <command:parameter required="true" variableLength="true" globbing="false" pipelineInput="True (ByValue)" position="1" aliases="none">
        <maml:name>prompt</maml:name>
        <maml:description>
          <maml:para>Your prompt text</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>temperature</maml:name>
        <maml:description>
          <maml:para>What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">Double</command:parameterValue>
        <dev:type>
          <maml:name>Double</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>1</dev:defaultValue>
      </command:parameter>
    </command:parameters>
    <command:inputTypes />
    <command:returnValues />
    <maml:alertSet>
      <maml:alert>
        <maml:para></maml:para>
      </maml:alert>
    </maml:alertSet>
    <command:examples>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 1 --------------------------</maml:title>
        <dev:code>New-OpenAICompletion -prompt "What's the capital of China"</dev:code>
        <dev:remarks>
          <maml:para>Use the -prompt parameter to specify your question.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 2 --------------------------</maml:title>
        <dev:code>New-OpenAICompletion "What's the capital of China"</dev:code>
        <dev:remarks>
          <maml:para>Pass the question directly as a positional argument.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 3 --------------------------</maml:title>
        <dev:code>"What's the capital of China" | New-OpenAICompletion</dev:code>
        <dev:remarks>
          <maml:para>Pass the question through the pipeline.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 4 --------------------------</maml:title>
        <dev:code>noc "What's the capital of China"</dev:code>
        <dev:remarks>
          <maml:para>Use the alias of the cmdlet (noc stands for New-OpenAICompletion).</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 5 --------------------------</maml:title>
        <dev:code>New-OpenAICompletion -prompt "What's the capital of China" -api_key "sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" -engine "text-davinci-003" -endpoint "https://api.openai.com/v1/completions"</dev:code>
        <dev:remarks>
          <maml:para>You can fully control the value of each parameter.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 6 --------------------------</maml:title>
        <dev:code>New-OpenAICompletion -prompt "What's the capital of China" -azure</dev:code>
        <dev:remarks>
          <maml:para>Use the Azure OpenAI service to do the same thing.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 7 --------------------------</maml:title>
        <dev:code>New-OpenAICompletion -prompt "What's the capital of China" -azure -api_key "xxxxxxxxxxxxxxx" -engine "chenxizhang" -endpoint "https://chenxizhang.openai.azure.com/"</dev:code>
        <dev:remarks>
          <maml:para>You can fully control the value of each parameter.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 8 --------------------------</maml:title>
        <dev:code>"What's the capital of China","韭菜炒蛋怎么做" | noc -azure</dev:code>
        <dev:remarks>
          <maml:para>Pass two or more prompts (even in different languages) through the pipeline.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 9 --------------------------</maml:title>
        <dev:code>Get-ChildItem *.txt | Get-Content | noc -azure</dev:code>
        <dev:remarks>
          <maml:para>Pass the content of all the text files in a folder to the OpenAI service.</maml:para>
        </dev:remarks>
      </command:example>
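      <!-- Illustrative examples: the parameter values and the API key below are placeholders, not recommendations. -->
      <command:example>
        <maml:title>-------------------------- EXAMPLE 10 --------------------------</maml:title>
        <dev:code>New-OpenAICompletion -prompt "What's the capital of China" -max_tokens 256 -temperature 0.2 -n 2</dev:code>
        <dev:remarks>
          <maml:para>You can combine max_tokens, temperature and n to control the length, determinism and number of completions. The values shown here are only illustrative.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 11 --------------------------</maml:title>
        <dev:code>$env:OPENAI_API_KEY = "sk-xxxxxxxxxxxxxxxxxxxxxxxx"
New-OpenAICompletion "What's the capital of China"</dev:code>
        <dev:remarks>
          <maml:para>Instead of passing -api_key on every call, you can set the OPENAI_API_KEY environment variable (or OPENAI_API_KEY_AZURE for the Azure service) once in your session. The key shown here is a placeholder.</maml:para>
        </dev:remarks>
      </command:example>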
    </command:examples>
    <command:relatedLinks>
      <maml:navigationLink>
        <maml:linkText>GitHub link</maml:linkText>
        <maml:uri>https://github.com/code365opensource/code365scripts/tree/master/code365scripts.openai</maml:uri>
      </maml:navigationLink>
    </command:relatedLinks>
  </command:command>
  <command:command xmlns:maml="http://schemas.microsoft.com/maml/2004/10" xmlns:command="http://schemas.microsoft.com/maml/dev/command/2004/10" xmlns:dev="http://schemas.microsoft.com/maml/dev/2004/10" xmlns:MSHelp="http://msdn.microsoft.com/mshelp">
    <command:details>
      <command:name>New-OpenAIConversation</command:name>
      <command:verb>New</command:verb>
      <command:noun>OpenAIConversation</command:noun>
      <maml:description>
        <maml:para>Create a conversation with the OpenAI service</maml:para>
      </maml:description>
    </command:details>
    <maml:description>
      <maml:para>If you want a ChatGPT-like experience, this cmdlet is designed for you. It creates a conversation with the OpenAI service and supports single-line prompts, multi-line prompts, and even loading a prompt from a file.</maml:para>
    </maml:description>
    <command:syntax>
      <command:syntaxItem>
        <maml:name>New-OpenAIConversation</maml:name>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="1" aliases="none">
          <maml:name>api_key</maml:name>
          <maml:description>
            <maml:para>OpenAI API key. You can specify it by environment variable (OPENAI_API_KEY or OPENAI_API_KEY_AZURE).</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="2" aliases="none">
          <maml:name>engine</maml:name>
          <maml:description>
            <maml:para>The name of your model. You can specify it by environment variable (OPENAI_ENGINE or OPENAI_ENGINE_AZURE). If you are using the original OpenAI service, you can ignore this parameter; the default model name will be text-davinci-003.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="3" aliases="none">
          <maml:name>endpoint</maml:name>
          <maml:description>
            <maml:para>OpenAI service endpoint. You can specify it by environment variable (OPENAI_ENDPOINT or OPENAI_ENDPOINT_AZURE). If you are using the original OpenAI service, you can ignore this parameter, and the value will always be https://api.openai.com/v1/completions.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
          <dev:type>
            <maml:name>String</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>None</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="4" aliases="none">
          <maml:name>max_tokens</maml:name>
          <maml:description>
            <maml:para>The maximum number of tokens to generate in the completion. Default value is 1024.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">Int32</command:parameterValue>
          <dev:type>
            <maml:name>Int32</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>1024</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="5" aliases="none">
          <maml:name>temperature</maml:name>
          <maml:description>
            <maml:para>What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.</maml:para>
          </maml:description>
          <command:parameterValue required="true" variableLength="false">Double</command:parameterValue>
          <dev:type>
            <maml:name>Double</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>1</dev:defaultValue>
        </command:parameter>
        <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
          <maml:name>azure</maml:name>
          <maml:description>
            <maml:para>Use the Azure OpenAI service instead of the original OpenAI service.</maml:para>
          </maml:description>
          <dev:type>
            <maml:name>SwitchParameter</maml:name>
            <maml:uri />
          </dev:type>
          <dev:defaultValue>False</dev:defaultValue>
        </command:parameter>
      </command:syntaxItem>
    </command:syntax>
    <command:parameters>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="1" aliases="none">
        <maml:name>api_key</maml:name>
        <maml:description>
          <maml:para>OpenAI API key. You can specify it by environment variable (OPENAI_API_KEY or OPENAI_API_KEY_AZURE).</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="named" aliases="none">
        <maml:name>azure</maml:name>
        <maml:description>
          <maml:para>Use the Azure OpenAI service instead of the original OpenAI service.</maml:para>
        </maml:description>
        <command:parameterValue required="false" variableLength="false">SwitchParameter</command:parameterValue>
        <dev:type>
          <maml:name>SwitchParameter</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>False</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="3" aliases="none">
        <maml:name>endpoint</maml:name>
        <maml:description>
          <maml:para>OpenAI service endpoint. You can specify it by environment variable (OPENAI_ENDPOINT or OPENAI_ENDPOINT_AZURE). If you are using the original OpenAI service, you can ignore this parameter, and the value will always be https://api.openai.com/v1/completions.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="2" aliases="none">
        <maml:name>engine</maml:name>
        <maml:description>
          <maml:para>The name of your model. You can specify it by environment variable (OPENAI_ENGINE or OPENAI_ENGINE_AZURE). If you are using the original OpenAI service, you can ignore this parameter; the default model name will be text-davinci-003.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">String</command:parameterValue>
        <dev:type>
          <maml:name>String</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>None</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="4" aliases="none">
        <maml:name>max_tokens</maml:name>
        <maml:description>
          <maml:para>The maximum number of tokens to generate in the completion. Default value is 1024.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">Int32</command:parameterValue>
        <dev:type>
          <maml:name>Int32</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>1024</dev:defaultValue>
      </command:parameter>
      <command:parameter required="false" variableLength="true" globbing="false" pipelineInput="False" position="5" aliases="none">
        <maml:name>temperature</maml:name>
        <maml:description>
          <maml:para>What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.</maml:para>
        </maml:description>
        <command:parameterValue required="true" variableLength="false">Double</command:parameterValue>
        <dev:type>
          <maml:name>Double</maml:name>
          <maml:uri />
        </dev:type>
        <dev:defaultValue>1</dev:defaultValue>
      </command:parameter>
    </command:parameters>
    <command:inputTypes />
    <command:returnValues />
    <maml:alertSet>
      <maml:alert>
        <maml:para></maml:para>
      </maml:alert>
    </maml:alertSet>
    <command:examples>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 1 --------------------------</maml:title>
        <dev:code>New-OpenAIConversation</dev:code>
        <dev:remarks>
          <maml:para>Use all the default parameter values and start with the simplest form of the cmdlet.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 2 --------------------------</maml:title>
        <dev:code>New-OpenAIConversation -azure</dev:code>
        <dev:remarks>
          <maml:para>Use all the default parameter values, but call the Azure OpenAI service instead of the original one.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 3 --------------------------</maml:title>
        <dev:code>gpt</dev:code>
        <dev:remarks>
          <maml:para>You can use the aliases of this cmdlet; both gpt and oai stand for New-OpenAIConversation.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 4 --------------------------</maml:title>
        <dev:code>gpt -azure</dev:code>
        <dev:remarks>
          <maml:para>You can use the alias of this cmdlet to call the Azure OpenAI service.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 5 --------------------------</maml:title>
        <dev:code>gpt -api_key $api_key -engine $engine</dev:code>
        <dev:remarks>
          <maml:para>You can specify custom values for the api_key and engine parameters.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 6 --------------------------</maml:title>
        <dev:code>gpt -api_key $api_key -engine $engine -endpoint $endpoint -azure</dev:code>
        <dev:remarks>
          <maml:para>You can specify custom values for the api_key, engine and endpoint parameters when calling the Azure OpenAI service.</maml:para>
        </dev:remarks>
      </command:example>
      <command:example>
        <maml:title>-------------------------- EXAMPLE 7 --------------------------</maml:title>
        <dev:code>gpt -api_key $api_key -engine $engine -endpoint $endpoint -azure -max_tokens 1024 -temperature 1 -n 1</dev:code>
        <dev:remarks>
          <maml:para>You can combine all the parameters to fully control the request.</maml:para>
        </dev:remarks>
      </command:example>
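      <!-- Illustrative example: uses only parameters documented above; the values are placeholders. -->
      <command:example>
        <maml:title>-------------------------- EXAMPLE 8 --------------------------</maml:title>
        <dev:code>gpt -max_tokens 2048 -temperature 0.2</dev:code>
        <dev:remarks>
          <maml:para>You can keep the default service and model but adjust max_tokens and temperature to get longer, more deterministic answers. The values shown here are only illustrative.</maml:para>
        </dev:remarks>
      </command:example>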
    </command:examples>
    <command:relatedLinks>
      <maml:navigationLink>
        <maml:linkText>GitHub link</maml:linkText>
        <maml:uri>https://github.com/code365opensource/code365scripts/tree/master/code365scripts.openai</maml:uri>
      </maml:navigationLink>
    </command:relatedLinks>
  </command:command>
</helpItems>