;;; gpt-chat-mode.el --- Just another Emacs ChatGPT app -*- lexical-binding: t; -*-
;;
;; Copyright (C) 2022 Ariel Serranoni
;;
;; Author: Ariel Serranoni <[email protected]>
;; Maintainer: Ariel Serranoni <[email protected]>
;; Created: December 29, 2022
;; Modified: December 29, 2022
;; Version: 0.0.1
;; Keywords: comm convenience tools
;; Homepage: https://github.com/aserranoni/gpt-chat-mode
;; Package-Requires: ((emacs "24.3"))
;;
;; This file is not part of GNU Emacs.
;;
;;; Commentary:
;;
;; This code is the result of studying Emacs Lisp by wrapping the OpenAI chat
;; API in a small lui-based chat buffer.
;;
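;; A minimal usage sketch, assuming the API key lives at the default
;; `openai-key-file' location:
;;
;;   ;; Put your OpenAI API key on a single line in ~/emacs-gpt/key,
;;   ;; then load this file and open the chat buffer:
;;   (require 'gpt-chat-mode)
;;   (start-chatting)   ; or M-x start-chatting
;;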
;;; Code:
(require 'lui)
(require 'url)
(require 'json)
(define-derived-mode gpt-chat-mode lui-mode "AI-Chat"
  "Major mode for chatting with the OpenAI chat API in a lui buffer."
  (setq-local lui-input-function #'interact-gpt-assistant)
  ;; Re-fill output buffer-locally so other lui-based modes are unaffected.
  (add-hook 'lui-post-output-hook
            (lambda () (fill-region (point-min) (point-max)))
            nil t)
  (lui-set-prompt ">> "))
(defun get-string-from-file (file-name)
  "Return the contents of FILE-NAME as a string, minus the trailing newline."
  (with-temp-buffer
    (insert-file-contents file-name)
    (substring (buffer-string) 0 -1)))
(defun get-integers (x)
  "Return the list of integers from 0 to X - 1 (currently unused)."
  (number-sequence 0 (1- x)))
(defvar openai-key-file "~/emacs-gpt/key"
  "File containing the OpenAI API key on a single line.")
(defvar openai-base-url "https://api.openai.com/v1"
  "Base URL of the OpenAI REST API.")
(defvar openai-api-key (get-string-from-file openai-key-file)
  "OpenAI API key, read from `openai-key-file' when this file is loaded.")
(defvar gpt-bot-welcome-message "\nWelcome to the chat GPT bot assistant. Ask me anything!\n"
  "Greeting inserted at the top of a newly created chat buffer.")
(defvar ask-gpt-default-buffer-name "*ASKGPT*"
  "Name of the chat buffer used by `start-chatting'.")
(defvar default-headers
  (list (cons "Content-Type" "application/json")
        (cons "Authorization" (format "Bearer %s" openai-api-key)))
  "HTTP headers sent with every request to the OpenAI API.")
(defun format-api-url (endpoint)
  "Return the full URL for the API ENDPOINT under `openai-base-url'."
  (format "%s/%s" openai-base-url endpoint))
(defun format-sample-data (query role model)
  "Build the payload for a chat completion request.
QUERY is the message text, ROLE its role and MODEL the model name.
The alist is built fresh instead of mutating quoted literals with `setcdr'."
  `(("model" . ,model)
    ("messages" . ((("role" . ,role)
                    ("content" . ,query))))
    ("max_tokens" . 2000)
    ("temperature" . 0)))
;; (format-sample-data "Hey There" "user" "gpt-3.5-turbo")
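;; For illustration (not a captured request), the call above JSON-encodes to
;; roughly:
;;   {"model": "gpt-3.5-turbo",
;;    "messages": [{"role": "user", "content": "Hey There"}],
;;    "max_tokens": 2000,
;;    "temperature": 0}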
(defun make-api-request (endpoint query role model)
  "POST QUERY with ROLE to the OpenAI ENDPOINT, requesting MODEL.
Return the parsed JSON response as a Lisp object."
  (let ((url (format-api-url endpoint))
        (url-request-method "POST")
        (url-request-extra-headers default-headers)
        (url-request-data
         (encode-coding-string
          (json-encode (format-sample-data query role model)) 'utf-8)))
    (with-current-buffer (url-retrieve-synchronously url)
      (goto-char (point-min))
      ;; The JSON body starts after the blank line that ends the HTTP headers.
      (re-search-forward "^$")
      (json-read))))
;; (defvar sample-response (make-api-request "chat/completions" "Hey There" "user" "gpt-3.5-turbo"))
;; (cdr (assoc 'content (cdr (assoc 'message (elt (cdr (assoc 'choices sample-response)) 0)))))
(defun format-gpt-response (resp)
  "Extract the assistant's message text from the parsed API response RESP."
  (cdr (assoc 'content
              (cdr (assoc 'message
                          (elt (cdr (assoc 'choices resp)) 0))))))
;; (format-gpt-response sample-response)
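;; A sketch (not a captured response) of the shape `format-gpt-response'
;; expects after `json-read', abridged to the fields it actually reads:
;;   ((choices . [((message . ((role . "assistant") (content . "Hi there!"))))])
;;    ...)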
(defun gpt-interact (x role)
  "Send input X with ROLE to the chat completions endpoint and show the reply."
  (let ((str (format-gpt-response
              (make-api-request "chat/completions" x role "gpt-3.5-turbo"))))
    (lui-insert str)
    (fill-region (point-min) (point-max))))
(defun interact-gpt-assistant (x)
  "Lui input handler that forwards the user input X to `gpt-interact'."
  (gpt-interact x "assistant"))
(defun chat-get-buffer-create (name)
  "Return the chat buffer named NAME, creating and initializing it if needed."
  (let ((buffer (get-buffer name)))
    (unless buffer
      (setq buffer (get-buffer-create name))
      (with-current-buffer buffer
        (gpt-chat-mode)
        (lui-insert gpt-bot-welcome-message)))
    buffer))
;;;###autoload
(defun start-chatting ()
  "Open (or reuse) the GPT chat buffer and switch to it."
  (interactive)
  (let ((buffer (chat-get-buffer-create ask-gpt-default-buffer-name)))
    (with-current-buffer buffer
      (goto-char (point-max)))
    (switch-to-buffer buffer)))
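;; Users might bind the entry point to a key in their init file, for example
;; (the binding below is only a suggestion, not part of this package):
;;   (global-set-key (kbd "C-c g") #'start-chatting)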
(provide 'gpt-chat-mode)
;;; gpt-chat-mode.el ends here