Mirror of https://github.com/0xacx/chatGPT-shell-cli.git
Merge pull request #14 from 0xacx/add-request-parameter-modification
Add request parameters, temp, model, tokens, image size. Add default …
commit 32be4f1688

chatgpt.sh | 45 lines changed
@@ -9,6 +9,41 @@ handleError() {
         exit 1
     fi
 }
+# parse command line arguments
+while [[ "$#" -gt 0 ]]; do
+    case $1 in
+        -t | --temperature)
+            TEMPERATURE="$2"
+            shift
+            shift
+            ;;
+        --max-tokens)
+            MAX_TOKENS="$2"
+            shift
+            shift
+            ;;
+        -m | --model)
+            MODEL="$2"
+            shift
+            shift
+            ;;
+        -s | --size)
+            SIZE="$2"
+            shift
+            shift
+            ;;
+        *)
+            echo "Unknown parameter: $1"
+            exit 1
+            ;;
+    esac
+done
+
+# set defaults
+TEMPERATURE=${TEMPERATURE:-0.7}
+MAX_TOKENS=${MAX_TOKENS:-1024}
+MODEL=${MODEL:-text-davinci-003}
+SIZE=${SIZE:-512x512}
 
 echo -e "Welcome to chatgpt. You can quit with '\033[36mexit\033[0m'."
 running=true
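For reference, a minimal invocation sketch using the options introduced in this hunk. The flag names and defaults come from the diff; the script path ./chatgpt.sh and the concrete values are illustrative only:

    # every flag is optional; anything left unset falls back to the defaults above,
    # because ${VAR:-default} only substitutes when VAR is unset or empty
    ./chatgpt.sh -t 0.9 --max-tokens 256 -m text-davinci-003 -s 256x256

Each option consumes its value with the double shift, and any unrecognised flag falls through to the *) branch, prints "Unknown parameter", and exits.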
@@ -33,7 +68,7 @@ while $running; do
             -d '{
             "prompt": "'"${prompt#*image:}"'",
             "n": 1,
-            "size": "512x512"
+            "size": "'"$SIZE"'"
             }')
         handleError "$image_response"
         image_url=$(echo $image_response | jq -r '.data[0].url')
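With this hunk the image request honours -s instead of the hard-coded 512x512. A usage sketch, assuming the image: prompt prefix implied by the ${prompt#*image:} expansion (the prompt text is made up):

    ./chatgpt.sh -s 1024x1024
    # then, at the interactive prompt:
    image: a watercolor lighthouse at dusk

The value is forwarded to the API unvalidated, so it has to be a size the image endpoint actually accepts.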
@@ -69,16 +104,18 @@ while $running; do
     else
         # escape quotation marks
         escaped_prompt=$(echo "$prompt" | sed 's/"/\\"/g')
         # escape new lines
         escaped_prompt=${escaped_prompt//$'\n'/' '}
         # request to OpenAI API
         response=$(curl https://api.openai.com/v1/completions \
             -sS \
             -H 'Content-Type: application/json' \
             -H "Authorization: Bearer $OPENAI_KEY" \
             -d '{
-            "model": "text-davinci-003",
+            "model": "'"$MODEL"'",
             "prompt": "'"${escaped_prompt}"'",
-            "max_tokens": 1000,
-            "temperature": 0.7
+            "max_tokens": '$MAX_TOKENS',
+            "temperature": '$TEMPERATURE'
             }')
+
+        handleError "$response"
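The completion request body is built by repeatedly closing and reopening the single-quoted -d string: $MODEL is spliced in between the JSON double quotes so it stays a JSON string, while $MAX_TOKENS and $TEMPERATURE are spliced in bare so they remain JSON numbers. With the defaults set earlier, the payload expands to roughly the following (a sketch, whitespace aside; the prompt placeholder is illustrative):

    {
      "model": "text-davinci-003",
      "prompt": "<escaped user prompt>",
      "max_tokens": 1024,
      "temperature": 0.7
    }

A hypothetical alternative, not part of this PR, would be to lean on the jq dependency the script already uses for response parsing and let it handle quoting and escaping:

    # sketch only: build the request body with jq instead of manual escaping
    payload=$(jq -n \
        --arg model "$MODEL" \
        --arg prompt "$prompt" \
        --argjson max_tokens "$MAX_TOKENS" \
        --argjson temperature "$TEMPERATURE" \
        '{model: $model, prompt: $prompt, max_tokens: $max_tokens, temperature: $temperature}')
    response=$(curl https://api.openai.com/v1/completions \
        -sS \
        -H 'Content-Type: application/json' \
        -H "Authorization: Bearer $OPENAI_KEY" \
        -d "$payload")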