main.py

import io
import os
import json
import requests
from openai import OpenAI
from flask import Flask, request, jsonify, send_from_directory, url_for
from convert import alpaca_to_chatgpt, csv_to_jsonl

app = Flask(__name__)

ssl = None
# ssl = ('/etc/ssl/sample.crt', '/etc/ssl/sample.pem')

app.openai_key = os.environ.get("OPENAI_KEY", "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")
app.openai_client = OpenAI(api_key=app.openai_key)

app.chat_messages = [
    {"role": "system",
     "content": "Please respond professionally and in a friendly manner, using the same language as the original request."}
]
app.translate_messages = [
    {"role": "system",
     "content": "Please translate using the requested language."}
]
app.suggest_messages = [
    {"role": "system",
     "content": "Please suggest reply messages based on the previous conversations and the user's request."}
]
app.recommend_messages = [
    {"role": "system",
     "content": "Give normalized total weight of each category in json based on headlines"}
]
app.summary_messages = [
    {"role": "system",
     "content": "Please summarize an article."}
]

UPLOAD_FOLDER = 'files'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER


@app.route('/files/<name>')
def download_file(name):
    return send_from_directory(app.config["UPLOAD_FOLDER"], name)


@app.route('/', methods=['GET', 'POST'])
def test():
    return jsonify({"status": "0"})
def recommend(headlines, category):
    chat_messages = app.recommend_messages.copy()
    try:
        # Prompt (Indonesian): give each category a weight based on the headlines,
        # sum and normalize the weights, and return the result as JSON.
        json_payload = {
            "role": "user",
            "content": f"""{headlines}
Berikan nilai berat masing-masing kategori, jumlahkan dan normalisasikan:
{category}
Berikan dalam bentuk json
"""
        }
        chat_messages.append(json_payload)
        print(chat_messages)
        json_response = app.openai_client.chat.completions.create(model="gpt-3.5-turbo-1106",
                                                                  messages=chat_messages,
                                                                  response_format={"type": "json_object"})
        print(json_response.choices[0].message.content)
        return json.loads(json_response.choices[0].message.content)
    except Exception as error_print:
        app.logger.error(error_print)
        return {}, 405
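
# Illustrative sketch (not called anywhere in this module): how recommend() might be
# invoked directly. The headline and category values are made-up placeholders; the real
# caller is the /gpt route below, which passes them from the request body.
def _example_recommend_call():
    headlines = [
        "Timnas lolos ke final Piala Asia",
        "Harga BBM naik mulai pekan depan",
    ]
    category = ["olahraga", "ekonomi", "politik"]
    # Returns the parsed JSON object produced by the model (category weights).
    return recommend(headlines, category)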
def vision(message, image_url=None, image_b64=None):
    chat_messages = app.chat_messages.copy()
    url = ""
    if image_url:
        url = f"{image_url}"
    elif image_b64:
        # Base64 payloads are wrapped as a data URL for the vision model.
        url = f"data:image/jpeg;base64,{image_b64}"
    try:
        json_payload = {
            "role": "user",
            "content": [
                {"type": "text", "text": message},
                {
                    "type": "image_url",
                    "image_url": {
                        "url": url,
                    },
                },
            ],
        }
        chat_messages.append(json_payload)
        print(chat_messages)
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o",
            messages=chat_messages,
            max_tokens=500
        )
        return {"role": "assistant", "content": json_response.choices[0].message.content}
    except Exception as error_print:
        app.logger.error(error_print)
        return {}, 405
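
# Illustrative sketch (not called anywhere in this module): preparing a base64 image for
# vision(). The file path is a made-up placeholder; in production the /gpt route below
# receives the base64 string in the request body instead.
def _example_vision_call():
    import base64
    with open("files/example.jpg", "rb") as image_file:  # hypothetical local file
        image_b64 = base64.b64encode(image_file.read()).decode("utf-8")
    return vision("Ini gambar apa?", image_b64=image_b64)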
@app.route('/gpt', methods=['POST'])
def gpt():
    chat_messages = app.chat_messages.copy()
    chat_model = "gpt-3.5-turbo"
    use_video = False
    suggest = False
    summarize = False
    predict_q = 0
    max_char_msg = 500
    max_resp_token = 600
    category = []
    headlines = []
    image_url = ""
    num_choices = 1
    json_payload = request.get_json()
    if not json_payload:
        json_payload = []
    has_named_params = False
    if isinstance(json_payload, dict):
        has_named_params = 'payload' in json_payload
        if 'payload' in json_payload:
            if 'predict_q' in json_payload:
                predict_q = 5 if json_payload['predict_q'] > 5 else 0 if json_payload['predict_q'] < 0 else json_payload['predict_q']
            if 'num_choices' in json_payload:
                num_choices = 5 if json_payload['num_choices'] > 5 else 1 if json_payload['num_choices'] < 1 else json_payload['num_choices']
            if 'use_video' in json_payload:
                use_video = json_payload['use_video'] == "1"
            if 'chat_model' in json_payload:
                chat_model = json_payload['chat_model']
                max_resp_token = 2048
            if 'translate' in json_payload:
                chat_messages = app.translate_messages.copy()
                json_payload['payload'][-1]['content'] = json_payload['payload'][-1][
                    'content'] + f" (Translate to {json_payload['translate']})"
            elif 'suggest' in json_payload:
                suggest = json_payload['suggest'] == "1"
                if suggest:
                    chat_messages = app.suggest_messages.copy()
                else:
                    chat_messages = app.chat_messages.copy()
                json_payload['payload'][-1]['content'] = json_payload['payload'][-1][
                    'content'] + f" What can I say to him/her?"
            elif 'summarize' in json_payload:
                summarize = json_payload['summarize'] == "1"
                if summarize:
                    chat_messages = app.summary_messages.copy()
                    max_char_msg = 2000
                    max_resp_token = 1000
                else:
                    chat_messages = app.chat_messages.copy()
                json_payload['payload'][-1]['content'] = f"Please summarize this article:\n" + \
                    json_payload['payload'][-1]['content']
            else:
                chat_messages = app.chat_messages.copy()
            json_payload = json_payload['payload']
            if isinstance(json_payload, dict):
                json_payload = [json_payload]
        elif 'greeting' in json_payload:
            chat_messages = app.chat_messages.copy()
            company_name = json_payload['greeting']['company_name']
            timestamp = json_payload['greeting']['timestamp']
            # Prompt (Indonesian): does the company name contain an Islamic element?
            # Answer with 'Ya' (yes) or 'Tidak' (no).
            islamic_message = f"Apakah Nama '{company_name}' terdapat unsur islami? Jawab dengan 'Ya' atau 'Tidak'"
            islam_messages = app.chat_messages.copy()
            islam_messages.append({
                "role": "user",
                "content": islamic_message
            })
            islamic_response = app.openai_client.chat.completions.create(model="gpt-3.5-turbo",
                                                                         messages=islam_messages,
                                                                         max_tokens=2, temperature=0.5)
            # Prompt (Indonesian): generate a chatbot greeting from the company at the given
            # time, without mentioning the time; open with "Assalamu'alaikum..." if the name
            # was judged Islamic.
            if 'Ya' in islamic_response.choices[0].message.content:
                greeting_message = f"Buatkan respons chatbot berupa greeting dari chat perusahaan bernama {company_name} pada jam {timestamp}, tidak perlu mention waktu, dan jawab dengan 'Assalamu'alaikum...' terlebih dahulu"
            else:
                greeting_message = f"Buatkan respons chatbot berupa greeting dari chat perusahaan bernama {company_name} pada jam {timestamp}, tidak perlu mention waktu"
            json_payload = [
                {
                    "role": "user",
                    "content": greeting_message
                }
            ]
        elif 'recommend' in json_payload:
            headlines = json_payload['recommend']['headlines']
            category = json_payload['recommend']['category']
            return recommend(headlines, category)
        elif 'image_url' in json_payload:
            image = json_payload['image_url']
            message = json_payload["message"] if 'message' in json_payload else "Ini gambar apa?"
            return vision(message, image_url=image)
        elif 'image_b64' in json_payload:
            image = json_payload['image_b64']
            message = json_payload["message"] if 'message' in json_payload else "Ini gambar apa?"
            return vision(message, image_b64=image)
        else:
            chat_messages = app.chat_messages.copy()
            json_payload = [json_payload]
    # Keep only the last five messages and truncate each one to max_char_msg characters,
    # cutting on word boundaries.
    json_payload = json_payload[-5:]
    for message in json_payload:
        if message['role'] == 'user':
            content = message['content'].lower()
        else:
            content = message['content']
        content_arr = content.split(" ")
        new_content_len = len(content[:max_char_msg].split(" "))
        message['content'] = " ".join(content_arr[:new_content_len])
        chat_messages.append(message)
    app.logger.info(chat_messages)
    result = {}
    try:
        n = num_choices
        json_response = app.openai_client.chat.completions.create(model=chat_model,
                                                                  messages=chat_messages,
                                                                  max_tokens=max_resp_token, temperature=0.7, n=n)
        app.logger.info(json_response.choices[0].message)
        if has_named_params:
            if suggest:
                choices = json_response.choices
                messages = [i.message for i in choices]
                json_formatted = []
                for message in messages:
                    json_formatted.append({"role": "assistant", "content": message.content})
                result = {"url": "", "message": json_formatted}
            else:
                if use_video:
                    # TODO: to be implemented
                    result = {"url": url_for('download_file', name="test.mp4", _external=True),
                              "message": {"role": "assistant", "content": json_response.choices[0].message.content}}
                else:
                    result = {"role": "assistant", "content": json_response.choices[0].message.content}
                    if predict_q:
                        # Prompt (Indonesian): suggest `predict_q` follow-up questions the user
                        # might ask next, returned as a JSON array.
                        query_q = {
                            "role": "user",
                            "content": f"Berikan {predict_q} pertanyaan lain yang akan saya ajukan berdasarkan percakapan kali ini dalam bentuk json array"
                        }
                        chat_messages.append(result)
                        chat_messages.append(query_q)
                        json_response_q = app.openai_client.chat.completions.create(model=chat_model,
                                                                                    messages=chat_messages,
                                                                                    max_tokens=max_resp_token,
                                                                                    temperature=0.2,
                                                                                    response_format={"type": "json_object"})
                        json_response_dict = json.loads(json_response_q.choices[0].message.content)
                        print(json_response_dict)
                        if json_response_dict is not None:
                            if isinstance(json_response_dict, dict):
                                first_key = next(iter(json_response_dict))
                                json_response_dict = json_response_dict[first_key]
                            elif isinstance(json_response_dict, str):
                                json_response_dict = [json_response_dict]
                            result["predict_q"] = json_response_dict
        else:
            result = {"role": "assistant", "content": json_response.choices[0].message.content}
    except Exception as error_print:
        app.logger.error(error_print)
        result = {}, 405
    return result
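
# Illustrative sketch (not called anywhere in this module): two payload shapes the /gpt
# route above accepts. The base URL assumes the app.run() call at the bottom of this
# file (port 8348); the message text and image URL are made-up placeholders.
def _example_gpt_requests():
    base_url = "http://localhost:8348"
    # Named-parameter form: wraps the conversation in "payload" and asks for two
    # predicted follow-up questions.
    chat = requests.post(f"{base_url}/gpt", json={
        "payload": [{"role": "user", "content": "Halo, apa kabar?"}],
        "predict_q": 2
    })
    # Vision form: a dict with "image_url" is routed to vision() above.
    image = requests.post(f"{base_url}/gpt", json={
        "image_url": "https://example.com/photo.jpg",
        "message": "Ini gambar apa?"
    })
    return chat.json(), image.json()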
@app.route('/train', methods=['POST'])
def train():
    prev_model = "gpt-3.5-turbo"
    if 'job_id' in request.form:
        return train_with_id(job_id=request.form['job_id'])
    elif 'train_file' in request.files:
        train_file = request.files['train_file']
        app.logger.info({"filename": train_file.filename})
        openai_file = None
        # Pick a converter based on the part of the filename after the last dot.
        extension = train_file.filename.rsplit('.', 1)[-1]
        if extension == 'jsonl':
            openai_file = train_file.stream.read()
        elif extension == 'csv':
            openai_file = csv_to_jsonl(train_file.stream.read())
        elif extension == 'json':
            openai_file = alpaca_to_chatgpt(train_file)
        if 'prev_model' in request.form:
            prev_model = request.form['prev_model']
        app.logger.info(f"Previous model: {prev_model}")
        if 'mock' not in request.form:
            f = app.openai_client.files.create(
                file=openai_file,
                purpose="fine-tune"
            )
            job = app.openai_client.fine_tuning.jobs.create(
                training_file=f.id,
                model=prev_model,
                hyperparameters={
                    "n_epochs": 5
                }
            )
            app.logger.info({"mock": "no", "status": job.status, "job_id": job.id})
            return {"status": job.status, "job_id": job.id}
        else:
            app.logger.info({"mock": "yes", "status": "ok"})
            return {"status": "ok"}
    else:
        app.logger.error({"status": "error", "message": "Training file not found"})
        return {"status": "error", "message": "Training file not found"}
def train_with_id(job_id):
    try:
        job = app.openai_client.fine_tuning.jobs.retrieve(job_id)
        if job.fine_tuned_model is None:
            app.logger.info({"job_id": job_id, "status": job.status})
            return {"status": job.status}
        else:
            app.logger.info({"job_id": job_id, "status": job.status, "model_name": job.fine_tuned_model})
            return {"status": job.status, "model_name": job.fine_tuned_model}
    except Exception as error_print:
        app.logger.error(error_print)
        return {"status": "Could not find job from id"}
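
# Illustrative sketch (not called anywhere in this module): how a client might start a
# fine-tuning job via /train and then poll it. The training file name is a made-up
# placeholder and the base URL assumes the app.run() call at the bottom of this file.
def _example_train_requests():
    base_url = "http://localhost:8348"
    # Start a job from a local JSONL file (add "mock": "1" to the form data to skip the
    # actual OpenAI calls, as handled in train() above).
    with open("train_data.jsonl", "rb") as train_file:  # hypothetical local file
        start = requests.post(f"{base_url}/train", files={"train_file": train_file})
    job_id = start.json().get("job_id")
    # Poll the same endpoint with the returned job_id until a model name appears.
    status = requests.post(f"{base_url}/train", data={"job_id": job_id})
    return status.json()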
@app.route('/llama', methods=['POST'])
def llama():
    max_char_msg = 500
    max_resp_token = 600
    json_payload = request.get_json()
    if not json_payload:
        json_payload = []
    has_named_params = False
    if isinstance(json_payload, dict):
        has_named_params = 'payload' in json_payload
        if 'payload' in json_payload:
            json_payload = json_payload['payload']
            if isinstance(json_payload, dict):
                json_payload = [json_payload]
        else:
            json_payload = [json_payload]
    # Only the last message is sent; truncate it to max_char_msg characters on word
    # boundaries and ask for the reply in Indonesian ("Jawab dengan Bahasa Indonesia").
    message = json_payload[-1]
    content = message['content']
    content_arr = content.split(" ")
    new_content_len = len(content[:max_char_msg].split(" "))
    content = " ".join(content_arr[:new_content_len])
    content = content + " Jawab dengan Bahasa Indonesia"
    try:
        json_request = {
            "model": "llama3",
            "prompt": content,
            "stream": False
        }
        r = requests.post("http://localhost:11434/api/generate", json=json_request)
        if r.status_code == 200:
            result = {
                "role": "assistant",
                "content": r.json()["response"]
            }
        else:
            result = {}, r.status_code
    except Exception as error_print:
        app.logger.error(error_print)
        result = {}, 405
    return result
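
# Illustrative sketch (not called anywhere in this module): calling the /llama route,
# which in turn expects an Ollama server on localhost:11434 serving the "llama3" model.
# The base URL assumes the app.run() call below; the message text is a made-up placeholder.
def _example_llama_request():
    base_url = "http://localhost:8348"
    response = requests.post(f"{base_url}/llama", json={
        "payload": [{"role": "user", "content": "Apa ibu kota Indonesia?"}]
    })
    return response.json()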
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8348, debug=True, ssl_context=ssl)

# See PyCharm help at https://www.jetbrains.com/help/pycharm/