# main.py
import datetime
import logging
import os
import json
import re
import time
import uuid
import random

import openai
import requests
from openai import OpenAI
from flask import Flask, request, jsonify, send_from_directory, url_for

from convert import alpaca_to_chatgpt, csv_to_jsonl

app = Flask(__name__)
ssl = None
# ssl = ('/etc/ssl/sample.crt', '/etc/ssl/sample.pem')
# Read the OpenAI API key from the environment; do not hard-code it in source.
app.openai_key = os.environ.get("OPENAI_KEY", "")
app.openai_client = OpenAI(api_key=app.openai_key)
# logging.basicConfig(level=logging.DEBUG, filename='/jkt-disk-01/app/mms/chatgpt-apache/chatgpt.log', format='%(asctime)s %(message)s')

app.chat_messages = [
    {"role": "system",
     "content": "Please respond professionally and in a friendly manner, using the same language as the original request. Use emoji responsibly."}
]
app.translate_messages = [
    {"role": "system",
     "content": "Please translate using the requested language."}
]
app.suggest_messages = [
    {"role": "system",
     "content": "Please suggest reply messages based on the previous conversations and the user's request."}
]
app.recommend_messages = [
    {"role": "system",
     "content": "Give normalized total weight of each category in json based on headlines"}
]
app.summary_messages = [
    {"role": "system",
     "content": "Please summarize an article."}
]

UPLOAD_FOLDER = 'files'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER


@app.route('/files/<name>')
def download_file(name):
    return send_from_directory(app.config["UPLOAD_FOLDER"], name)


@app.route('/', methods=['GET', 'POST'])
def test():
    return jsonify({"status": "0"})
def roulette() -> str:
    """Pick a chat model with a weighted random draw."""
    roulette_arr = [(80, "gpt-4o-mini"), (20, "gpt-4o")]
    rand_num = random.randrange(0, 100)  # single draw in 0-99 against weights that sum to 100
    model_name = ""
    n = 0
    j = 0
    while rand_num >= n:
        n += roulette_arr[j][0]
        model_name = roulette_arr[j][1]
        j += 1
    return model_name
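# The tuple weights above are read against a single 0-99 draw, so with
# [(80, "gpt-4o-mini"), (20, "gpt-4o")] roughly 80% of non-assistant chats land on
# gpt-4o-mini. A quick sanity check (hypothetical snippet, not part of the service):
#
#     from collections import Counter
#     print(Counter(roulette() for _ in range(10_000)))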
def prune_message(message: dict):
    m = message.copy()
    for k in list(m.keys()):
        if k != 'role' and k != 'content':
            m.pop(k)
    return m
def recommend(headlines, category):
    chat_messages = app.recommend_messages.copy()
    try:
        # Indonesian prompt: weigh each category against the headlines, sum and
        # normalize the weights, and return the result as JSON.
        json_payload = {
            "role": "user",
            "content": f"""{headlines}
Berikan nilai berat masing-masing kategori, jumlahkan dan normalisasikan:
{category}
Berikan dalam bentuk json
"""
        }
        chat_messages.append(json_payload)
        time.sleep(3)
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=chat_messages,
            response_format={"type": "json_object"}
        )
        result = {"status": "ok", "message": json.loads(json_response.choices[0].message.content)}
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result
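# recommend() is reached through POST /gpt (see the gpt() handler below) when the request
# body carries a "recommend" key. A minimal example payload with hypothetical values,
# assuming the dev server started by app.run() at the bottom of this file:
#
#     POST http://localhost:8348/gpt
#     {"recommend": {"headlines": ["Headline 1", "Headline 2"],
#                    "category": ["politics", "sports", "technology"]}}
#
# The model is asked to return normalized per-category weights as a JSON object.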
def vision(message, image_url=None, image_b64=None):
    chat_messages = app.chat_messages.copy()
    url = ""
    if image_url:
        url = f"{image_url}"
    elif image_b64:
        url = f"data:image/jpeg;base64,{image_b64}"
    try:
        json_payload = {
            "role": "user",
            "content": [
                {"type": "text", "text": message},
                {
                    "type": "image_url",
                    "image_url": {
                        "url": url,
                    },
                },
            ],
        }
        chat_messages.append(json_payload)
        time.sleep(3)
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o",
            messages=chat_messages,
            max_tokens=500
        )
        result = {"role": "assistant", "content": json_response.choices[0].message.content}
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result
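# vision() is reached through POST /gpt when the body has an "image_url" or "image_b64"
# key (see gpt() below). Hypothetical example payloads; the optional "message" key
# defaults to "Ini gambar apa?" ("What is this picture?"):
#
#     {"image_url": "https://example.com/photo.jpg", "message": "Describe this image"}
#     {"image_b64": "<base64-encoded JPEG>", "message": "Ini gambar apa?"}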
def suggest_schedule(sched: list, message: list, assistant_id=None, timestamp=None):
    result = {}
    if timestamp is None:
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
    # Indonesian prompt: tell the model the current time, show an empty meeting form
    # (title, description, start/end time, location, participants) and append the
    # existing schedule as JSON.
    message[-1]["content"] = f"{message[-1]['content']} Sekarang jam {timestamp}. Berikut contoh form nya jika ingin mengisi meeting:\nTitle:\nDeskripsi:\nWaktu Mulai:\nWaktu Selesai:\nLokasi:\nPartisipan: @User1 @User2 @User3\n\nBerikut jadwalnya:\n{json.dumps(sched)}"
    try:
        if assistant_id:
            runs = app.openai_client.beta.threads.create_and_run_poll(
                assistant_id=assistant_id,
                thread={
                    "messages": message
                }
            )
            if runs.status == "completed":
                messages = list(app.openai_client.beta.threads.messages.list(thread_id=runs.thread_id, run_id=runs.id))
                message_content = messages[0].content[0].text
                app.logger.info(message_content.value)
                pattern = re.compile(r"【\d+:\d+†\(?source\)?】")
                filtered_message = pattern.sub("", message_content.value)
                result = {"role": "assistant", "content": filtered_message}
        else:
            json_response = app.openai_client.chat.completions.create(
                model="gpt-4o",
                messages=message,
                temperature=0.7
            )
            response_message = json_response.choices[0].message.content
            result = {"role": "assistant", "content": response_message}
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result
def convert_to_schedule(message: list, sched: list = None):
    result = {}
    if sched is not None:
        # Indonesian prompt: if the request does not clash with the existing schedule,
        # emit the event as a JSON object; otherwise emit an assistant message
        # describing the clash.
        message[-1]["content"] = f'{message[-1]["content"]}\n\nBerikut jadwalnya:\n{sched}\n\n Jika tidak ada bentrok jadwal keluarkan menjadi format json seperti berikut:\n\n{{"start_time": "2025-10-01 11:00", "end_time": "2025-10-01 13:00", "title": "Judul kegiatan", "description": "Deskripsi kegiatan", "location": "Lokasi kegiatan" }}\n\nJika bentrok keluarkan informasi dalam bentuk json sebagai berikut:\n\n{{"role": "assistant", "content": "Contoh pesan jadwal bentrok"}}'
    else:
        # Indonesian prompt: convert the free-text request into the JSON event format.
        message[-1]["content"] = f'{message[-1]["content"]} Ubah menjadi format json seperti berikut:\n\n{{"start_time": "2025-10-01 11:00", "end_time": "2025-10-01 13:00", "title": "Judul Kegiatan", "description": "Deskripsi kegiatan", "location": "Lokasi kegiatan" }}'
    try:
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o",
            messages=message,
            temperature=0.2,
            response_format={"type": "json_object"}
        )
        response_message = json_response.choices[0].message.content
        result = json.loads(response_message)
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result
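# suggest_schedule() and convert_to_schedule() are reached through POST /gpt when a
# "schedule" and/or "convert_schedule" key is present (see gpt() below). A sketch of the
# two request shapes, with hypothetical schedule entries:
#
#     # Ask for meeting suggestions against an existing schedule:
#     {"payload": [{"role": "user", "content": "Kapan saya bisa meeting besok?"}],
#      "schedule": [{"start_time": "2025-10-01 09:00", "end_time": "2025-10-01 10:00",
#                    "title": "Standup"}],
#      "timestamp": "2025-10-01 08:00"}
#
#     # Convert a free-text request into a JSON event (optionally checking for clashes):
#     {"payload": [{"role": "user", "content": "Buat meeting jam 11 sampai 13 besok"}],
#      "convert_schedule": "1",
#      "schedule": []}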
@app.route('/gpt', methods=['POST'])
def gpt():
    assistant_id = ""
    assistant = None
    chat_messages = app.chat_messages.copy()
    chat_model = "gpt-4o-mini"
    use_video = False
    suggest = False
    summarize = False
    expression = False
    predict_q = 0
    max_char_msg = 500
    max_resp_token = 600
    category = []
    headlines = []
    image_url = ""
    num_choices = 1
    json_payload = request.get_json()
    if not json_payload:
        json_payload = []
    has_named_params = False
    app.logger.info("Request: " + str(json_payload))
    if isinstance(json_payload, dict):
        has_named_params = 'payload' in json_payload
        if 'payload' in json_payload:
            if 'predict_q' in json_payload:
                predict_q = 5 if json_payload['predict_q'] > 4 else 0 if json_payload['predict_q'] < 1 else \
                    json_payload['predict_q']
            if 'num_choices' in json_payload:
                num_choices = 5 if json_payload['num_choices'] > 4 else 1 if json_payload['num_choices'] < 2 else \
                    json_payload['num_choices']
            if 'use_video' in json_payload:
                use_video = json_payload['use_video'] == "1"
            if 'chat_model' in json_payload and 'assistant_id' not in json_payload:
                chat_model = json_payload['chat_model']
                max_resp_token = 2048
            if 'expression' in json_payload:
                expression = json_payload['expression'] == "1"
            if 'translate' in json_payload:
                chat_messages = app.translate_messages.copy()
                json_payload['payload'][-1]['content'] = json_payload['payload'][-1][
                    'content'] + f" (Translate to {json_payload['translate']})"
            elif 'suggest' in json_payload:
                suggest = json_payload['suggest'] == "1"
                if suggest:
                    chat_messages = app.suggest_messages.copy()
                else:
                    chat_messages = app.chat_messages.copy()
                json_payload['payload'][-1]['content'] = json_payload['payload'][-1][
                    'content'] + " What can I say to him/her?"
            elif 'summarize' in json_payload:
                summarize = json_payload['summarize'] == "1"
                if summarize:
                    chat_messages = app.summary_messages.copy()
                    max_char_msg = 2000
                    max_resp_token = 1000
                else:
                    chat_messages = app.chat_messages.copy()
                json_payload['payload'][-1]['content'] = "Please summarize this article:\n" + \
                    json_payload['payload'][-1]['content']
            elif 'assistant_id' in json_payload:
                assistant_id = json_payload['assistant_id']
                assistant = app.openai_client.beta.assistants.retrieve(assistant_id=assistant_id)
                chat_model = assistant.model
            else:
                chat_messages = app.chat_messages.copy()
            if 'schedule' in json_payload and 'convert_schedule' not in json_payload:
                timestamp = None
                if 'timestamp' in json_payload:
                    timestamp = json_payload["timestamp"]
                sched = json_payload["schedule"].copy()
                message = json_payload["payload"].copy()
                return suggest_schedule(sched, message, assistant_id, timestamp)
            elif 'convert_schedule' in json_payload:
                sched = json_payload["schedule"].copy() if 'schedule' in json_payload else None
                if json_payload['convert_schedule'] == "1":
                    return convert_to_schedule(json_payload['payload'].copy(), sched)
            json_payload = json_payload['payload']
            if isinstance(json_payload, dict):
                json_payload = [json_payload]
        elif 'greeting' in json_payload:
            chat_messages = app.chat_messages.copy()
            company_name = json_payload['greeting']['company_name']
            timestamp = json_payload['greeting']['timestamp']
            # Ask (in Indonesian) whether the company name has an Islamic element; expect "Ya"/"Tidak".
            islamic_message = f"Apakah Nama '{company_name}' terdapat unsur islami? Jawab dengan 'Ya' atau 'Tidak'"
            islam_messages = app.chat_messages.copy()
            islam_messages.append({
                "role": "user",
                "content": islamic_message
            })
            islamic_response = app.openai_client.chat.completions.create(model="gpt-4o-mini",
                                                                         messages=islam_messages,
                                                                         max_tokens=2, temperature=0.5)
            # Indonesian prompt: generate a greeting for the company at the given time,
            # starting with "Assalamu'alaikum..." if the name has an Islamic element.
            if 'Ya' in islamic_response.choices[0].message.content:
                greeting_message = f"Buatkan respons chatbot berupa greeting dari chat perusahaan bernama {company_name} pada jam {timestamp}, tidak perlu mention waktu, dan jawab dengan 'Assalamu'alaikum...' terlebih dahulu"
            else:
                greeting_message = f"Buatkan respons chatbot berupa greeting dari chat perusahaan bernama {company_name} pada jam {timestamp}, tidak perlu mention waktu"
            json_payload = [
                {
                    "role": "user",
                    "content": greeting_message
                }
            ]
        elif 'recommend' in json_payload:
            headlines = json_payload['recommend']['headlines']
            category = json_payload['recommend']['category']
            return recommend(headlines, category)
        elif 'image_url' in json_payload:
            image = json_payload['image_url']
            message = json_payload["message"] if 'message' in json_payload else "Ini gambar apa?"
            return vision(message, image_url=image)
        elif 'image_b64' in json_payload:
            image = json_payload['image_b64']
            message = json_payload["message"] if 'message' in json_payload else "Ini gambar apa?"
            return vision(message, image_b64=image)
        else:
            app.logger.info("This request uses the old json format")
            chat_messages = app.chat_messages.copy()
    app.logger.info("Chat Messages:")
    app.logger.info(str(chat_messages))
    json_payload = json_payload[-5:]
    if assistant_id:
        chat_messages = []
    for message in json_payload:
        p_message = prune_message(message)
        if p_message['role'] == 'user':
            content = p_message['content'].lower()
        else:
            content = p_message['content']
        # Truncate each message to roughly max_char_msg characters on a word boundary.
        content_arr = content.split(" ")
        new_content_arr = content[:max_char_msg].split(" ")
        new_content_len = len(new_content_arr)
        arr = []
        for i in range(new_content_len):
            arr.append(content_arr[i])
        p_message['content'] = " ".join(arr)
        chat_messages.append(p_message)
    app.logger.info(chat_messages)
    result = {}
    try:
        n = num_choices
        if "gpt-3.5-turbo" in chat_model or "gpt-4o-mini" in chat_model:
            chat_model = roulette()
        app.logger.info(f"Model used: {chat_model}")
        if assistant_id and not suggest:
            runs = app.openai_client.beta.threads.create_and_run_poll(
                assistant_id=assistant_id,
                thread={
                    "messages": chat_messages
                }
            )
            if runs.status != "completed":
                result = {"role": "assistant", "content": "Maaf, saat ini saya sedang sibuk. Coba beberapa saat lagi."}
            else:
                messages = list(app.openai_client.beta.threads.messages.list(thread_id=runs.thread_id, run_id=runs.id))
                try:
                    message_content = messages[0].content[0].text
                    app.logger.info(message_content.value)
                    pattern = re.compile(r"【\d+:\d+†\(?source\)?】")
                    filtered_message = pattern.sub("", message_content.value)
                    result = {"role": "assistant", "content": filtered_message}
                except IndexError:
                    result = {"role": "assistant", "content": "Saat ini saya tidak memiliki informasi yang diperlukan untuk menjawab pertanyaan Anda."}
        else:
            time.sleep(3)
            json_response = app.openai_client.chat.completions.create(model=chat_model,
                                                                      messages=chat_messages,
                                                                      max_tokens=max_resp_token, temperature=0.7, n=n)
            app.logger.info(json_response.choices[0].message)
            if has_named_params:
                if suggest:
                    choices = json_response.choices
                    messages = [i.message for i in choices]
                    json_formatted = []
                    for message in messages:
                        json_formatted.append({"role": "assistant", "content": message.content})
                    result = {"url": "", "message": json_formatted}
                else:
                    if use_video:
                        # TODO: to be implemented
                        result = {"url": url_for('download_file', name="test.mp4", _external=True),
                                  "message": {"role": "assistant", "content": json_response.choices[0].message.content}}
                    else:
                        result = {"role": "assistant", "content": json_response.choices[0].message.content}
            else:
                result = {"role": "assistant", "content": json_response.choices[0].message.content}
        if expression:
            exprr = expresso(text=result['content'])
            result['expression'] = exprr['expression']
        if predict_q:
            json_response_q = None
            # Indonesian prompt: ask for predict_q follow-up questions as a JSON array,
            # either about the assistant's topic or about the current conversation.
            if assistant_id:
                query_q = {
                    "role": "user",
                    "content": f"Berikan {predict_q} pertanyaan random yang akan saya ajukan sesuai topik asisten dalam bentuk json array"
                }
            else:
                query_q = {
                    "role": "user",
                    "content": f"Berikan {predict_q} pertanyaan lain yang akan saya ajukan berdasarkan percakapan kali ini dalam bentuk json array"
                }
            chat_messages.append(prune_message(result))
            chat_messages.append(query_q)
            if assistant_id:
                runs = app.openai_client.beta.threads.create_and_run_poll(
                    assistant_id=assistant_id,
                    thread={
                        "messages": chat_messages
                    }
                )
                if runs.status == "completed":
                    messages = list(app.openai_client.beta.threads.messages.list(thread_id=runs.thread_id, run_id=runs.id))
                    message_content = messages[0].content[0].text
                    app.logger.info(message_content.value)
                    pattern = re.compile(r"【\d+:\d+†\(?source\)?】")
                    filtered_message = pattern.sub("", message_content.value)
                    predict_q_arr = [
                        {
                            "role": "system",
                            "content": assistant.instructions
                        },
                        {
                            "role": "assistant",
                            "content": filtered_message
                        },
                        {
                            "role": "user",
                            "content": f"Ekstrak {predict_q} pertanyaan tersebut dalam bentuk json array"
                        }
                    ]
                    json_response_q = app.openai_client.chat.completions.create(
                        model=chat_model,
                        messages=predict_q_arr,
                        temperature=0.2,
                        response_format={"type": "json_object"}
                    )
            else:
                json_response_q = app.openai_client.chat.completions.create(model=chat_model,
                                                                            messages=chat_messages,
                                                                            max_tokens=max_resp_token,
                                                                            temperature=0.2,
                                                                            response_format={"type": "json_object"})
            if json_response_q:
                json_response_dict = json.loads(json_response_q.choices[0].message.content)
                if json_response_dict is not None:
                    if isinstance(json_response_dict, dict):
                        if len(json_response_dict) > 1:
                            qs = []
                            for q in json_response_dict.values():
                                qs.append(q)
                            json_response_dict = qs
                        else:
                            try:
                                first_key = next(iter(json_response_dict))
                                json_response_dict = json_response_dict[first_key]
                            except StopIteration:
                                json_response_dict = []
                    elif isinstance(json_response_dict, str):
                        json_response_dict = [json_response_dict]
                    result["predict_q"] = json_response_dict
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    app.logger.info("Result: " + str(result))
    if isinstance(result, tuple):
        # Error results carry (body, http_status); return them separately so the status code is preserved.
        return jsonify(result[0]), result[1]
    return json.dumps(result)
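# A minimal, hypothetical call to the main /gpt endpoint, assuming the Flask dev server
# started by app.run() at the bottom of this file (port 8348). "payload" is the chat
# history (only the last five messages are kept); the optional keys handled above
# (predict_q, num_choices, use_video, chat_model, expression, translate, suggest,
# summarize, assistant_id, schedule, greeting, recommend, image_url, image_b64, ...)
# ride alongside it:
#
#     curl -X POST http://localhost:8348/gpt \
#          -H "Content-Type: application/json" \
#          -d '{"payload": [{"role": "user", "content": "Halo, apa kabar?"}], "predict_q": 3}'
#
# The old format (a bare list of {"role", "content"} messages) is still accepted.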
@app.route('/train', methods=['POST'])
def train():
    prev_model = "gpt-3.5-turbo"
    instructions = None
    if 'job_id' in request.form:
        return train_with_id(job_id=request.form['job_id'])
    elif 'train_file' in request.files:
        train_file = request.files['train_file']
        app.logger.info({"filename": train_file.filename})
        if 'instructions' in request.form:
            instructions = request.form['instructions']
        openai_file = None
        ext = train_file.filename.rsplit('.', 1)[-1].lower()
        if ext == 'jsonl':
            openai_file = train_file.stream.read()
        elif ext == 'csv':
            openai_file = csv_to_jsonl(train_file.stream.read(), instructions)
        elif ext == 'json':
            openai_file = alpaca_to_chatgpt(train_file, instructions)
        if 'prev_model' in request.form:
            prev_model = request.form['prev_model']
        app.logger.info(f"Previous model: {prev_model}")
        if 'mock' not in request.form:
            f = app.openai_client.files.create(
                file=openai_file,
                purpose="fine-tune"
            )
            job = app.openai_client.fine_tuning.jobs.create(
                training_file=f.id,
                model=prev_model,
                hyperparameters={
                    "n_epochs": 5
                }
            )
            app.logger.info({"mock": "no", "status": job.status, "job_id": job.id})
            retval = {"status": job.status, "job_id": job.id}
            return retval
        else:
            app.logger.info({"mock": "yes", "status": "ok"})
            return {"status": "ok"}
    else:
        app.logger.error({"status": "error", "message": "Training file not found"})
        return {"status": "error", "message": "Training file not found"}
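# Hypothetical /train calls, assuming the dev server below. A new fine-tuning job takes a
# multipart upload (.jsonl, .csv, or Alpaca-style .json); polling an existing job only
# needs its id:
#
#     curl -X POST http://localhost:8348/train \
#          -F "train_file=@dataset.jsonl" -F "prev_model=gpt-3.5-turbo"
#
#     curl -X POST http://localhost:8348/train -F "job_id=ftjob-abc123"
#
# Adding -F "mock=1" exercises the upload path without creating a real job.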
def train_with_id(job_id):
    try:
        time.sleep(3)
        job = app.openai_client.fine_tuning.jobs.retrieve(job_id)
        if job.fine_tuned_model is None:
            app.logger.info({"job_id": job_id, "status": job.status})
            return {"status": job.status}
        else:
            app.logger.info({"job_id": job_id, "status": job.status, "model_name": job.fine_tuned_model})
            return {"status": job.status, "model_name": job.fine_tuned_model}
    except Exception:
        app.logger.exception("error")
        return {"status": "Could not find job from id"}
@app.route('/assistant/create', methods=['POST'])
def assistant_create():
    model_name = "gpt-4o-mini"
    assistant_name = "Assistant"
    assistant_ins = "Please respond professionally and in a friendly manner, using the same language as the original request."
    if request.is_json:
        request_form = request.json
    else:
        request_form = request.form.copy()
    assistant_name = request_form.pop('name', assistant_name)
    assistant_ins = request_form.pop('instructions', assistant_ins)
    model_name = request_form.pop('model_name', model_name)
    vector_store_id = request_form.pop('vector_store_id', "")
    file_batch_id = ""
    try:
        temperature = float(request_form.pop('temperature', 1.0))
        if temperature < 0.0:
            temperature = 0.0
        elif temperature > 1.0:
            temperature = 1.0
    except ValueError:
        temperature = 1.0
    tool_resources = {"tool_resources": {"file_search": {"vector_store_ids": [vector_store_id]}}} \
        if vector_store_id \
        else {}
    try:
        time.sleep(3)
        assistant = app.openai_client.beta.assistants.create(
            name=assistant_name,
            instructions=assistant_ins,
            model=model_name,
            tools=[{"type": "file_search"}],
            temperature=temperature,
            **tool_resources,
            **request_form
        )
        if 'attachment1' in request.files and not vector_store_id:
            resp_att = assistant_att()
            retval = {}
            if resp_att['status'] == 'completed':
                resp_upd = assistant_update(assistant.id, resp_att['vector_store_id'])
                assistant_updated = "1" if resp_upd['status'] == 'ok' else "0"
            else:
                assistant_updated = "0"
            if 'vector_store_id' in resp_att:
                retval['vector_store_id'] = resp_att['vector_store_id']
            if 'file_batch_id' in resp_att:
                retval['file_batch_id'] = resp_att['file_batch_id']
            retval['status'] = "ok"
            retval['assistant_id'] = assistant.id
            retval['assistant_updated'] = assistant_updated
            return retval
        else:
            return {"status": "ok", "assistant_id": assistant.id, "assistant_updated": "1" if vector_store_id else "0"}
    except ValueError:
        app.logger.exception("error")
        return {"status": "error",
                "message": "Failed to create assistant, please check whether your parameters are correct"}
    except openai.NotFoundError as e:
        app.logger.exception("error")
        return {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        return {"status": "error", "message": "Failed to create assistant, please try again"}, 405
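# Hypothetical /assistant/create call (form-encoded or JSON). Besides name, instructions,
# model_name, temperature and vector_store_id, any extra fields are passed straight
# through to the OpenAI Assistants create call; attachment1..attachmentN files trigger
# assistant_att() and then assistant_update():
#
#     curl -X POST http://localhost:8348/assistant/create \
#          -F "name=Support Bot" \
#          -F "instructions=Answer questions about the product." \
#          -F "model_name=gpt-4o-mini" \
#          -F "temperature=0.5" \
#          -F "attachment1=@manual.pdf"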
@app.route('/assistant/attachment', methods=['POST'])
def assistant_att():
    vector_store_id = request.form.get('vector_store_id', '')
    file_batch_id = request.form.get('file_batch_id', '')
    attachments: list[str] = []
    try:
        if not file_batch_id:
            if 'attachment1' not in request.files:
                return {"status": "error", "message": "No file for attachments"}
            else:
                has_attachments = True
                n = 1
                while has_attachments:
                    if f'attachment{n}' in request.files:
                        retf = app.openai_client.files.create(
                            file=(request.files[f'attachment{n}'].filename,
                                  request.files[f'attachment{n}'].read()),
                            purpose="assistants"
                        )
                        retf.filename = request.files[f'attachment{n}'].filename
                        attachments.append(retf.id)
                        n = n + 1
                    else:
                        has_attachments = False
                if vector_store_id:
                    vector_store = app.openai_client.beta.vector_stores.retrieve(vector_store_id=vector_store_id)
                else:
                    vector_store = app.openai_client.beta.vector_stores.create(
                        expires_after={
                            "anchor": "last_active_at",
                            "days": 365
                        }
                    )
                file_batch = app.openai_client.beta.vector_stores.file_batches.create_and_poll(
                    vector_store_id=vector_store.id,
                    file_ids=attachments
                )
                return {"status": file_batch.status, "vector_store_id": vector_store.id, "file_batch_id": file_batch.id}
        else:
            file_batch = app.openai_client.beta.vector_stores.file_batches.retrieve(file_batch_id,
                                                                                    vector_store_id=vector_store_id)
            return {"status": file_batch.status}
    except Exception:
        app.logger.exception("error")
        return {"status": "error", "message": "Upload attachment failed, please try again"}
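# Hypothetical /assistant/attachment calls. Uploading attachment1..attachmentN creates
# (or reuses) a vector store and starts a file batch; passing file_batch_id instead polls
# an existing batch:
#
#     curl -X POST http://localhost:8348/assistant/attachment -F "attachment1=@manual.pdf"
#     curl -X POST http://localhost:8348/assistant/attachment \
#          -F "vector_store_id=vs_abc123" -F "file_batch_id=vsfb_abc123"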
@app.route('/assistant/attachment/update', methods=['POST'])
def assistant_attachment_update():
    pass


@app.route('/assistant/update', methods=['POST'])
def assistant_update(aid=None, vid=None):
    try:
        request_form = request.form.copy()
        if aid is not None and vid is not None:
            assistant_id = aid
            vector_store_id = vid
        else:
            assistant_id = request_form.pop('assistant_id')
            vector_store_id = request_form.pop('vector_store_id', None)
        kwargs = {"assistant_id": assistant_id}
        if vector_store_id is not None:
            kwargs['tool_resources'] = {"file_search": {"vector_store_ids": [vector_store_id]}}
        if 'name' in request_form:
            kwargs['name'] = request_form.pop('name')
        if 'instructions' in request_form:
            kwargs['instructions'] = request_form.pop('instructions')
        time.sleep(3)
        app.openai_client.beta.assistants.update(**kwargs)
        return {"status": "ok"}
    except Exception:
        app.logger.exception("error")
        return {"status": "error", "message": "Update assistant failed, please try again"}
@app.route('/llama', methods=['POST'])
def llama():
    max_char_msg = 500
    max_resp_token = 600
    json_payload = request.get_json()
    if not json_payload:
        json_payload = []
    has_named_params = False
    if isinstance(json_payload, dict):
        has_named_params = 'payload' in json_payload
        if 'payload' in json_payload:
            json_payload = json_payload['payload']
            if isinstance(json_payload, dict):
                json_payload = [json_payload]
        else:
            json_payload = [json_payload]
    message = json_payload[-1]
    content = message['content']
    # Truncate the last message to roughly max_char_msg characters on a word boundary.
    content_arr = content.split(" ")
    new_content_arr = content[:max_char_msg].split(" ")
    new_content_len = len(new_content_arr)
    arr = []
    for i in range(new_content_len):
        arr.append(content_arr[i])
    content = " ".join(arr)
    content = content + " Jawab dengan Bahasa Indonesia"  # instructs the model to answer in Indonesian
    try:
        json_request = {
            "model": "llama3.1",
            "prompt": content,
            "stream": False
        }
        r = requests.post("http://localhost:11434/api/generate", json=json_request)
        if r.status_code == 200:
            result = {
                "role": "assistant",
                "content": r.json()["response"]
            }
        else:
            result = {}, r.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result
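# /llama forwards the last user message to a local Ollama server (assumed to be running
# llama3.1 at http://localhost:11434). A hypothetical request, same payload shape as /gpt:
#
#     curl -X POST http://localhost:8348/llama \
#          -H "Content-Type: application/json" \
#          -d '{"payload": [{"role": "user", "content": "Apa itu fotosintesis?"}]}'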
@app.route('/speech', methods=['POST'])
def speech(text=""):
    time.sleep(3)
    if not text and 'text' not in request.form:
        audio_file = request.files.get('audio')
        res = app.openai_client.audio.transcriptions.create(
            model="whisper-1",
            file=(audio_file.filename, audio_file.stream.read())
        )
        return {"status": "ok", "message": res.text}
    elif 'text' in request.form or text:
        text = request.form['text'] if 'text' in request.form else text
        uu_id = str(uuid.uuid4())
        app.logger.info(text)
        with app.openai_client.audio.speech.with_streaming_response.create(
            model="tts-1-hd",
            voice="echo",
            speed=0.8,
            input=text
        ) as res:
            res.stream_to_file(os.path.join(app.config['UPLOAD_FOLDER'], f"{uu_id}.mp3"))
        return download_file(f"{uu_id}.mp3")
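# Hypothetical /speech calls. With an "audio" file the endpoint transcribes it with
# whisper-1; with a "text" form field it synthesizes speech with tts-1-hd and returns
# the generated mp3 from the files/ folder:
#
#     curl -X POST http://localhost:8348/speech -F "audio=@voice_note.m4a"
#     curl -X POST http://localhost:8348/speech -F "text=Selamat pagi" -o greeting.mp3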
@app.route('/expression', methods=['POST'])
def expresso(text=""):
    if not text:
        if 'text' in request.form:
            text = request.form['text']
        else:
            return {"status": "error", "message": "No text for expression"}
    try:
        response = app.openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {
                    "role": "user",
                    "content": f"What is the closest expression of this text, choose between happy, sad, indifferent, fear, anger, surprise, or disgust, output json with key 'expression':\n\n{text}\n\n"
                }
            ],
            response_format={"type": "json_object"}
        )
        response_message = response.choices[0].message.content
        return json.loads(response_message)
    except Exception:
        return {"expression": "indifferent"}
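# Hypothetical /expression call; the response is a JSON object with a single
# "expression" key (happy, sad, indifferent, fear, anger, surprise, or disgust):
#
#     curl -X POST http://localhost:8348/expression -F "text=Saya sangat senang hari ini!"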
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8348, debug=True, ssl_context=ssl)

# See PyCharm help at https://www.jetbrains.com/help/pycharm/