import datetime
import json
import logging
import os
import random
import re
import time
import uuid

import openai
import requests
from flask import Flask, request, jsonify, send_from_directory, url_for
from openai import OpenAI

from convert import alpaca_to_chatgpt, csv_to_jsonl

app = Flask(__name__)
ssl = None
# ssl = ('/etc/ssl/sample.crt', '/etc/ssl/sample.pem')
app.openai_key = os.environ.get("OPENAI_KEY", "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")
app.openai_client = OpenAI(api_key=app.openai_key)
# logging.basicConfig(level=logging.DEBUG, filename='/jkt-disk-01/app/mms/chatgpt-apache/chatgpt.log', format='%(asctime)s %(message)s')

# Default system prompts for the different /gpt modes.
app.chat_messages = [
    {"role": "system",
     "content": "Please respond professionally and in a friendly manner, using the same language as the "
                "original request. Use emoji responsibly."}
]
app.translate_messages = [
    {"role": "system", "content": "Please translate using the requested language."}
]
app.suggest_messages = [
    {"role": "system", "content": "Please suggest reply messages based on the previous conversations and the user's request."}
]
app.recommend_messages = [
    {"role": "system", "content": "Give normalized total weight of each category in json based on headlines"}
]
app.summary_messages = [
    {"role": "system", "content": "Please summarize an article."}
]

UPLOAD_FOLDER = 'files'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER


@app.route('/files/<name>')
def download_file(name):
    return send_from_directory(app.config["UPLOAD_FOLDER"], name)


@app.route('/', methods=['GET', 'POST'])
def test():
    return jsonify({"status": "0"})


def roulette() -> str:
    """Pick a chat model at random according to the 80/20 weights below."""
    roulette_arr = [(80, "gpt-4o-mini"), (20, "gpt-4o")]
    rand_num = random.randrange(0, 100)  # 0..99, matching weights that sum to 100
    model_name = ""
    n = 0
    j = 0
    while rand_num >= n:
        n += roulette_arr[j][0]
        model_name = roulette_arr[j][1]
        app.logger.debug(model_name)
        j += 1
    return model_name


def prune_message(message: dict):
    """Return a copy of the message keeping only the 'role' and 'content' keys."""
    m = message.copy()
    for k in list(m.keys()):
        if k != 'role' and k != 'content':
            m.pop(k)
    return m


def recommend(headlines, category):
    """Weigh each category against the headlines and return normalized weights as JSON."""
    chat_messages = app.recommend_messages.copy()
    try:
        # Prompt (Indonesian): give each category a weight, sum and normalize, return as JSON.
        json_payload = {
            "role": "user",
            "content": f"""{headlines}
Berikan nilai berat masing-masing kategori, jumlahkan dan normalisasikan: {category}
Berikan dalam bentuk json
"""
        }
        chat_messages.append(json_payload)
        time.sleep(3)
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=chat_messages,
            response_format={"type": "json_object"}
        )
        result = {"status": "ok", "message": json.loads(json_response.choices[0].message.content)}
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result


def vision(message, image_url=None, image_b64=None):
    """Describe an image given either a public URL or a base64-encoded JPEG."""
    chat_messages = app.chat_messages.copy()
    url = ""
    if image_url:
        url = f"{image_url}"
    elif image_b64:
        url = f"data:image/jpeg;base64,{image_b64}"
    try:
        json_payload = {
            "role": "user",
            "content": [
                {"type": "text", "text": message},
                {"type": "image_url", "image_url": {"url": url}},
            ],
        }
        chat_messages.append(json_payload)
        time.sleep(3)
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o",
            messages=chat_messages,
            max_tokens=500
        )
        result = {"role": "assistant", "content": json_response.choices[0].message.content}
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result


def suggest_schedule(sched: list, message: list, assistant_id=None, timestamp=None):
    """Ask the model to suggest a meeting slot given the current time and the existing schedule."""
    result = {}
    if timestamp is None:
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
    # Appends (in Indonesian): the current time, a blank meeting form
    # (title/description/start/end/location/participants) and the existing schedule.
    message[-1]["content"] = (
        f"{message[-1]['content']} Sekarang jam {timestamp}. "
        f"Berikut contoh form nya jika ingin mengisi meeting:\n"
        f"Title:\nDeskripsi:\nWaktu Mulai:\nWaktu Selesai:\nLokasi:\n"
        f"Partisipan: @User1 @User2 @User3\n\n"
        f"Berikut jadwalnya:\n{json.dumps(sched)}"
    )
    try:
        if assistant_id:
            runs = app.openai_client.beta.threads.create_and_run_poll(
                assistant_id=assistant_id,
                thread={"messages": message}
            )
            if runs.status == "completed":
                messages = list(app.openai_client.beta.threads.messages.list(thread_id=runs.thread_id, run_id=runs.id))
                message_content = messages[0].content[0].text
                app.logger.info(message_content.value)
                pattern = re.compile(r"【\d+:\d+†\(?source\)?】")
                filtered_message = pattern.sub("", message_content.value)
                result = {"role": "assistant", "content": filtered_message}
        else:
            json_response = app.openai_client.chat.completions.create(model="gpt-4o", messages=message, temperature=0.7)
            response_message = json_response.choices[0].message.content
            result = {"role": "assistant", "content": response_message}
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result


def convert_to_schedule(message: list, sched: list = None):
    """Ask the model to turn the conversation into a schedule entry in JSON, flagging clashes."""
    result = {}
    if sched is not None:
        # Prompt (Indonesian): if there is no clash with the existing schedule, output the entry
        # as JSON; if there is a clash, output an assistant message describing it.
        message[-1]["content"] = (
            f'{message[-1]["content"]}\n\nBerikut jadwalnya:\n{sched}\n\n '
            f'Jika tidak ada bentrok jadwal keluarkan menjadi format json seperti berikut:\n\n'
            f'{{"start_time": "2025-10-01 11:00", "end_time": "2025-10-01 13:00", "title": "Judul kegiatan", '
            f'"description": "Deskripsi kegiatan", "location": "Lokasi kegiatan" }}\n\n'
            f'Jika bentrok keluarkan informasi dalam bentuk json sebagai berikut:\n\n'
            f'{{"role": "assistant", "content": "Contoh pesan jadwal bentrok"}}'
        )
    else:
        message[-1]["content"] = (
            f'{message[-1]["content"]} Ubah menjadi format json seperti berikut:\n\n'
            f'{{"start_time": "2025-10-01 11:00", "end_time": "2025-10-01 13:00", "title": "Judul Kegiatan", '
            f'"description": "Deskripsi kegiatan", "location": "Lokasi kegiatan" }}'
        )
    try:
        json_response = app.openai_client.chat.completions.create(
            model="gpt-4o",
            messages=message,
            temperature=0.2,
            response_format={"type": "json_object"}
        )
        response_message = json_response.choices[0].message.content
        result = json.loads(response_message)
    except openai.APITimeoutError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        app.logger.exception("error")
        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result

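
# Illustrative request body for POST /gpt (a sketch based on the fields parsed below;
# the values are made-up examples, not taken from a real client):
#
#   {
#     "payload": [{"role": "user", "content": "Halo, apa kabar?"}],
#     "chat_model": "gpt-4o-mini",
#     "predict_q": 3,
#     "num_choices": 1,
#     "expression": "1"
#   }
#
# Other named parameters ("translate", "suggest", "summarize", "assistant_id", "schedule",
# "convert_schedule", "greeting", "recommend", "image_url", "image_b64") switch the handler
# into the corresponding mode; a bare list of messages is treated as the old format.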
{"status": "error", "message": "Please try again"}, 405 return result @app.route('/gpt', methods=['POST']) def gpt(): assistant_id = "" assistant = None chat_messages = app.chat_messages.copy() chat_model = "gpt-4o-mini" use_video = False suggest = False summarize = False expression = False predict_q = 0 max_char_msg = 500 max_resp_token = 600 category = [] headlines = [] image_url = "" num_choices = 1 json_payload = request.get_json() if not json_payload: json_payload = [] has_named_params = False app.logger.info("Request: " + str(json_payload)) if isinstance(json_payload, dict): has_named_params = 'payload' in json_payload if 'payload' in json_payload: if 'predict_q' in json_payload: predict_q = 5 if json_payload['predict_q'] > 4 else 0 if json_payload['predict_q'] < 1 else \ json_payload['predict_q'] if 'num_choices' in json_payload: num_choices = 5 if json_payload['num_choices'] > 4 else 1 if json_payload['num_choices'] < 2 else \ json_payload['num_choices'] if 'use_video' in json_payload: use_video = json_payload['use_video'] == "1" if 'chat_model' in json_payload and 'assistant_id' not in json_payload: chat_model = json_payload['chat_model'] max_resp_token = 2048 if 'expression' in json_payload: expression = json_payload['expression'] == "1" if 'translate' in json_payload: chat_messages = app.translate_messages.copy() json_payload['payload'][-1]['content'] = json_payload['payload'][-1][ 'content'] + f" (Translate to {json_payload['translate']})" elif 'suggest' in json_payload: suggest = json_payload['suggest'] == "1" if suggest: chat_messages = app.suggest_messages.copy() else: chat_messages = app.chat_messages.copy() json_payload['payload'][-1]['content'] = json_payload['payload'][-1][ 'content'] + f" What can I say to him/her?" elif 'summarize' in json_payload: summarize = json_payload['summarize'] == "1" if summarize: chat_messages = app.summary_messages.copy() max_char_msg = 2000 max_resp_token = 1000 else: chat_messages = app.chat_messages.copy() json_payload['payload'][-1]['content'] = f"Please summarize this article:\n" + \ json_payload['payload'][-1]['content'] elif 'assistant_id' in json_payload: assistant_id = json_payload['assistant_id'] assistant = app.openai_client.beta.assistants.retrieve(assistant_id=assistant_id) chat_model = assistant.model else: chat_messages = app.chat_messages.copy() if 'schedule' in json_payload and 'convert_schedule' not in json_payload: timestamp = None if 'timestamp' in json_payload: timestamp = json_payload["timestamp"] sched = json_payload["schedule"].copy() message = json_payload["payload"].copy() return suggest_schedule(sched, message, assistant_id, timestamp) elif 'convert_schedule' in json_payload: sched = json_payload["schedule"].copy() if 'schedule' in json_payload else None if json_payload['convert_schedule'] == "1": return convert_to_schedule(json_payload['payload'].copy(), sched) json_payload = json_payload['payload'] if isinstance(json_payload, dict): json_payload = [json_payload] elif 'greeting' in json_payload: chat_messages = app.chat_messages.copy() company_name = json_payload['greeting']['company_name'] timestamp = json_payload['greeting']['timestamp'] islamic_message = f"Apakah Nama '{company_name}' terdapat unsur islami? 
            islam_messages = app.chat_messages.copy()
            islam_messages.append({"role": "user", "content": islamic_message})
            islamic_response = app.openai_client.chat.completions.create(model="gpt-4o-mini",
                                                                          messages=islam_messages,
                                                                          max_tokens=2,
                                                                          temperature=0.5)
            if 'Ya' in islamic_response.choices[0].message.content:
                # Prompt (Indonesian): write a greeting for the company chat at the given hour,
                # without mentioning the time, opening with "Assalamu'alaikum...".
                greeting_message = (f"Buatkan respons chatbot berupa greeting dari chat perusahaan bernama "
                                    f"{company_name} pada jam {timestamp}, tidak perlu mention waktu, "
                                    f"dan jawab dengan 'Assalamu'alaikum...' terlebih dahulu")
            else:
                greeting_message = (f"Buatkan respons chatbot berupa greeting dari chat perusahaan bernama "
                                    f"{company_name} pada jam {timestamp}, tidak perlu mention waktu")
            json_payload = [
                {"role": "user", "content": greeting_message}
            ]
        elif 'recommend' in json_payload:
            headlines = json_payload['recommend']['headlines']
            category = json_payload['recommend']['category']
            return recommend(headlines, category)
        elif 'image_url' in json_payload:
            image = json_payload['image_url']
            # Default question (Indonesian): "What is this picture?"
            message = json_payload["message"] if 'message' in json_payload else "Ini gambar apa?"
            return vision(message, image_url=image)
        elif 'image_b64' in json_payload:
            image = json_payload['image_b64']
            message = json_payload["message"] if 'message' in json_payload else "Ini gambar apa?"
            return vision(message, image_b64=image)
        else:
            app.logger.info("This request uses the old JSON format")
            chat_messages = app.chat_messages.copy()
    app.logger.info("Chat Messages:")
    app.logger.info(str(chat_messages))
    # Keep only the last five messages and trim each one to max_char_msg characters,
    # cutting on word boundaries.
    json_payload = json_payload[-5:]
    if assistant_id:
        # Assistants carry their own instructions, so drop the default system prompt.
        chat_messages = []
    for message in json_payload:
        p_message = prune_message(message)
        if p_message['role'] == 'user':
            content = p_message['content'].lower()
        else:
            content = p_message['content']
        content_arr = content.split(" ")
        new_content_arr = content[:max_char_msg].split(" ")
        new_content_len = len(new_content_arr)
        arr = []
        for i in range(new_content_len):
            arr.append(content_arr[i])
        p_message['content'] = " ".join(arr)
        chat_messages.append(p_message)
    app.logger.info(chat_messages)
    result = {}
    try:
        n = num_choices
        if "gpt-3.5-turbo" in chat_model or "gpt-4o-mini" in chat_model:
            chat_model = roulette()
        app.logger.info(f"Model used: {chat_model}")
        if assistant_id and not suggest:
            runs = app.openai_client.beta.threads.create_and_run_poll(
                assistant_id=assistant_id,
                thread={"messages": chat_messages}
            )
            if runs.status != "completed":
                # Indonesian: "Sorry, I'm busy at the moment. Please try again shortly."
                result = {"role": "assistant",
                          "content": "Maaf, saat ini saya sedang sibuk. Coba beberapa saat lagi."}
            else:
                messages = list(app.openai_client.beta.threads.messages.list(thread_id=runs.thread_id, run_id=runs.id))
                try:
                    message_content = messages[0].content[0].text
                    app.logger.info(message_content.value)
                    # Strip the assistant's file-citation markers, e.g. 【4:2†source】.
                    pattern = re.compile(r"【\d+:\d+†\(?source\)?】")
                    filtered_message = pattern.sub("", message_content.value)
                    result = {"role": "assistant", "content": filtered_message}
                except IndexError:
                    # Indonesian: "I don't currently have the information needed to answer your question."
                    result = {"role": "assistant",
                              "content": "Saat ini saya tidak memiliki informasi yang diperlukan untuk menjawab pertanyaan Anda."}
        else:
            time.sleep(3)
            json_response = app.openai_client.chat.completions.create(model=chat_model,
                                                                      messages=chat_messages,
                                                                      max_tokens=max_resp_token,
                                                                      temperature=0.7,
                                                                      n=n)
            app.logger.info(json_response.choices[0].message)
            if has_named_params:
                if suggest:
                    choices = json_response.choices
                    messages = [i.message for i in choices]
                    json_formatted = []
                    for message in messages:
                        json_formatted.append({"role": "assistant", "content": message.content})
                    result = {"url": "", "message": json_formatted}
                else:
                    if use_video:
                        # TODO: to be implemented
                        result = {"url": url_for('download_file', name="test.mp4", _external=True),
                                  "message": {"role": "assistant",
                                              "content": json_response.choices[0].message.content}}
                    else:
                        result = {"role": "assistant", "content": json_response.choices[0].message.content}
            else:
                result = {"role": "assistant", "content": json_response.choices[0].message.content}
        if expression:
            exprr = expresso(text=result['content'])
            result['expression'] = exprr['expression']
        if predict_q:
            json_response_q = None
            if assistant_id:
                # Prompt (Indonesian): "give {predict_q} random questions I could ask about the
                # assistant's topic, as a JSON array".
                query_q = {
                    "role": "user",
                    "content": f"Berikan {predict_q} pertanyaan random yang akan saya ajukan sesuai topik asisten dalam bentuk json array"
                }
            else:
                # Prompt (Indonesian): "give {predict_q} other questions I could ask based on this
                # conversation, as a JSON array".
                query_q = {
                    "role": "user",
                    "content": f"Berikan {predict_q} pertanyaan lain yang akan saya ajukan berdasarkan percakapan kali ini dalam bentuk json array"
                }
            chat_messages.append(prune_message(result))
            chat_messages.append(query_q)
            if assistant_id:
                runs = app.openai_client.beta.threads.create_and_run_poll(
                    assistant_id=assistant_id,
                    thread={"messages": chat_messages}
                )
                if runs.status == "completed":
                    messages = list(app.openai_client.beta.threads.messages.list(thread_id=runs.thread_id, run_id=runs.id))
                    message_content = messages[0].content[0].text
                    app.logger.info(message_content.value)
                    pattern = re.compile(r"【\d+:\d+†\(?source\)?】")
                    filtered_message = pattern.sub("", message_content.value)
                    predict_q_arr = [
                        {"role": "system", "content": assistant.instructions},
                        {"role": "assistant", "content": filtered_message},
                        {"role": "user", "content": f"Ekstrak {predict_q} pertanyaan tersebut dalam bentuk json array"}
                    ]
                    json_response_q = app.openai_client.chat.completions.create(
                        model=chat_model,
                        messages=predict_q_arr,
                        temperature=0.2,
                        response_format={"type": "json_object"}
                    )
            else:
                json_response_q = app.openai_client.chat.completions.create(model=chat_model,
                                                                            messages=chat_messages,
                                                                            max_tokens=max_resp_token,
                                                                            temperature=0.2,
                                                                            response_format={"type": "json_object"})
            if json_response_q:
                json_response_dict = json.loads(json_response_q.choices[0].message.content)
                if json_response_dict is not None:
                    # Normalize the model output into a plain list of questions.
                    if isinstance(json_response_dict, dict):
                        if len(json_response_dict) > 1:
                            qs = []
                            for q in json_response_dict.values():
                                qs.append(q)
                            json_response_dict = qs
                        else:
                            try:
                                first_key = next(iter(json_response_dict))
                                json_response_dict = json_response_dict[first_key]
                            except StopIteration:
                                json_response_dict = []
                    elif isinstance(json_response_dict, str):
                        json_response_dict = [json_response_dict]
result["predict_q"] = json_response_dict except openai.APITimeoutError as e: app.logger.exception("error") result = {"status": "error", "message": e.message}, 408 except openai.NotFoundError as e: app.logger.exception("error") result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code except Exception: app.logger.exception("error") result = {"status": "error", "message": "Please try again"}, 405 app.logger.info("Result: " + str(result)) return json.dumps(result) @app.route('/train', methods=['POST']) def train(): prev_model = "gpt-3.5-turbo" instructions = None if 'job_id' in request.form: return train_with_id(job_id=request.form['job_id']) elif 'train_file' in request.files: train_file = request.files['train_file'] app.logger.info({"filename": train_file.filename}) if 'instructions' in request.form: instructions = request.form['instructions'] openai_file = None if train_file.filename.split('.')[1] == 'jsonl': openai_file = train_file.stream.read() elif train_file.filename.split('.')[1] == 'csv': openai_file = csv_to_jsonl(train_file.stream.read(), instructions) elif train_file.filename.split('.')[1] == 'json': openai_file = alpaca_to_chatgpt(train_file, instructions) if 'prev_model' in request.form: prev_model = request.form['prev_model'] app.logger.info(f"Previous model: {prev_model}") if 'mock' not in request.form: f = app.openai_client.files.create( file=openai_file, purpose="fine-tune" ) job = app.openai_client.fine_tuning.jobs.create( training_file=f.id, model=prev_model, hyperparameters={ "n_epochs": 5 } ) app.logger.info({"mock": "no", "status": job.status, "job_id": job.id}) retval = {"status": job.status, "job_id": job.id} return retval else: app.logger.info({"mock": "yes", "status": "ok"}) return {"status": "ok"} else: app.logger.error({"status": "error", "message": "Training file not found"}) return {"status": "error", "message": "Training file not found"} def train_with_id(job_id): try: time.sleep(3) job = app.openai_client.fine_tuning.jobs.retrieve(job_id) if job.fine_tuned_model is None: app.logger.info({"job_id": job_id, "status": job.status}) return {"status": job.status} else: app.logger.info({"job_id": job_id, "status": job.status, "model_name": job.fine_tuned_model}) return {"status": job.status, "model_name": job.fine_tuned_model} except Exception as error_print: app.logger.exception("error") return {"status": "Could not find job from id"} @app.route('/assistant/create', methods=['POST']) def assistant_create(): model_name = "gpt-4o-mini" assistant_name = "Assistant" assistant_ins = "Please respond professionally and in a friendly manner, using the same language as the original request." 
    if request.is_json:
        request_form = request.json
    else:
        request_form = request.form.copy()
    assistant_name = request_form.pop('name', assistant_name)
    assistant_ins = request_form.pop('instructions', assistant_ins)
    model_name = request_form.pop('model_name', model_name)
    vector_store_id = request_form.pop('vector_store_id', "")
    file_batch_id = ""
    try:
        # Clamp temperature to the 0.0..1.0 range.
        temperature = float(request_form.pop('temperature', 1.0))
        if temperature < 0.0:
            temperature = 0.0
        elif temperature > 1.0:
            temperature = 1.0
    except ValueError:
        temperature = 1.0
    tool_resources = {"tool_resources": {"file_search": {"vector_store_ids": [vector_store_id]}}} \
        if vector_store_id \
        else {}
    try:
        time.sleep(3)
        assistant = app.openai_client.beta.assistants.create(
            name=assistant_name,
            instructions=assistant_ins,
            model=model_name,
            tools=[{"type": "file_search"}],
            temperature=temperature,
            **tool_resources,
            **request_form
        )
        if 'attachment1' in request.files and not vector_store_id:
            resp_att = assistant_att()
            retval = {}
            if resp_att['status'] == 'completed':
                resp_upd = assistant_update(assistant.id, resp_att['vector_store_id'])
                assistant_updated = "1" if resp_upd['status'] == 'ok' else "0"
            else:
                assistant_updated = "0"
            if 'vector_store_id' in resp_att:
                retval['vector_store_id'] = resp_att['vector_store_id']
            if 'file_batch_id' in resp_att:
                retval['file_batch_id'] = resp_att['file_batch_id']
            retval['status'] = "ok"
            retval['assistant_id'] = assistant.id
            retval['assistant_updated'] = assistant_updated
            return retval
        else:
            return {"status": "ok",
                    "assistant_id": assistant.id,
                    "assistant_updated": "1" if vector_store_id else "0"}
    except ValueError:
        app.logger.exception("error")
        return {"status": "error",
                "message": "Failed to create assistant, please check whether your parameters are correct"}
    except openai.NotFoundError as e:
        app.logger.exception("error")
        return {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        app.logger.exception("error")
        return {"status": "error", "message": "Failed to create assistant, please try again"}, 405


@app.route('/assistant/attachment', methods=['POST'])
def assistant_att():
    vector_store_id = request.form.get('vector_store_id', '')
    file_batch_id = request.form.get('file_batch_id', '')
    attachments: list[str] = []
    try:
        if not file_batch_id:
            if 'attachment1' not in request.files:
                return {"status": "error", "message": "No file for attachments"}
            else:
                # Upload attachment1, attachment2, ... until a gap is found.
                has_attachments = True
                n = 1
                while has_attachments:
                    if f'attachment{n}' in request.files:
                        retf = app.openai_client.files.create(
                            file=(request.files[f'attachment{n}'].filename, request.files[f'attachment{n}'].read()),
                            purpose="assistants"
                        )
                        retf.filename = request.files[f'attachment{n}'].filename
                        attachments.append(retf.id)
                        n = n + 1
                    else:
                        has_attachments = False
            if vector_store_id:
                vector_store = app.openai_client.beta.vector_stores.retrieve(vector_store_id=vector_store_id)
            else:
                vector_store = app.openai_client.beta.vector_stores.create(
                    expires_after={"anchor": "last_active_at", "days": 365}
                )
            file_batch = app.openai_client.beta.vector_stores.file_batches.create_and_poll(
                vector_store_id=vector_store.id,
                file_ids=attachments
            )
            return {"status": file_batch.status, "vector_store_id": vector_store.id, "file_batch_id": file_batch.id}
        else:
            file_batch = app.openai_client.beta.vector_stores.file_batches.retrieve(file_batch_id,
                                                                                    vector_store_id=vector_store_id)
            return {"status": file_batch.status}
    except Exception:
        app.logger.exception("error")
        return {"status": "error", "message": "Upload attachment failed, please try again"}

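
# Illustrative upload for POST /assistant/attachment (a sketch; the file names and the
# vector store id are placeholders): files are sent as attachment1, attachment2, ...,
# optionally together with a vector_store_id to reuse an existing store, e.g.
#
#   curl -F "attachment1=@manual.pdf" -F "attachment2=@faq.txt" \
#        -F "vector_store_id=vs_..." \
#        http://localhost:8348/assistant/attachment
#
# Sending file_batch_id (with vector_store_id) instead only polls the status of a
# previously created batch.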

@app.route('/assistant/attachment/update', methods=['POST'])
def assistant_attachment_update():
    pass


@app.route('/assistant/update', methods=['POST'])
def assistant_update(aid=None, vid=None):
    try:
        request_form = request.form.copy()
        if aid is not None and vid is not None:
            assistant_id = aid
            vector_store_id = vid
        else:
            assistant_id = request_form.pop('assistant_id')
            vector_store_id = request_form.pop('vector_store_id', None)
        kwargs = {"assistant_id": assistant_id}
        if vector_store_id is not None:
            kwargs['tool_resources'] = {"file_search": {"vector_store_ids": [vector_store_id]}}
        if 'name' in request_form:
            kwargs['name'] = request_form.pop('name')
        if 'instructions' in request_form:
            kwargs['instructions'] = request_form.pop('instructions')
        time.sleep(3)
        app.openai_client.beta.assistants.update(**kwargs)
        return {"status": "ok"}
    except Exception:
        app.logger.exception("error")
        return {"status": "error", "message": "Update assistant failed, please try again"}


@app.route('/llama', methods=['POST'])
def llama():
    max_char_msg = 500
    max_resp_token = 600
    json_payload = request.get_json()
    if not json_payload:
        json_payload = []
    has_named_params = False
    if isinstance(json_payload, dict):
        has_named_params = 'payload' in json_payload
        if 'payload' in json_payload:
            json_payload = json_payload['payload']
            if isinstance(json_payload, dict):
                json_payload = [json_payload]
        else:
            json_payload = [json_payload]
    message = json_payload[-1]
    # Trim the last message to max_char_msg characters on word boundaries.
    content = message['content']
    content_arr = content.split(" ")
    new_content_arr = content[:max_char_msg].split(" ")
    new_content_len = len(new_content_arr)
    arr = []
    for i in range(new_content_len):
        arr.append(content_arr[i])
    content = " ".join(arr)
    content = content + " Jawab dengan Bahasa Indonesia"  # "Answer in Indonesian"
    try:
        json_request = {
            "model": "llama3.1",
            "prompt": content,
            "stream": False
        }
        # Forward the prompt to a local Ollama server on its default port.
        r = requests.post("http://localhost:11434/api/generate", json=json_request)
        if r.status_code == 200:
            result = {"role": "assistant", "content": r.json()["response"]}
        else:
            result = {}, r.status_code
    except Exception:
        app.logger.exception("error")
        result = {"status": "error", "message": "Please try again"}, 405
    return result


@app.route('/speech', methods=['POST'])
def speech(text=""):
    time.sleep(3)
    if not text and 'text' not in request.form:
        # Speech-to-text: transcribe the uploaded audio file.
        audio_file = request.files.get('audio')
        res = app.openai_client.audio.transcriptions.create(
            model="whisper-1",
            file=(audio_file.filename, audio_file.stream.read())
        )
        return {"status": "ok", "message": res.text}
    elif 'text' in request.form or text:
        # Text-to-speech: synthesize an mp3 into the upload folder and return it.
        text = request.form['text'] if 'text' in request.form else text
        uu_id = str(uuid.uuid4())
        app.logger.info(text)
        with app.openai_client.audio.speech.with_streaming_response.create(
                model="tts-1-hd",
                voice="echo",
                speed=0.8,
                input=text
        ) as res:
            res.stream_to_file(os.path.join(app.config['UPLOAD_FOLDER'], f"{uu_id}.mp3"))
        return download_file(f"{uu_id}.mp3")


@app.route('/expression', methods=['POST'])
def expresso(text=""):
    if not text:
        if 'text' in request.form:
            text = request.form['text']
        else:
            return {"status": "error", "message": "No text for expression"}
    try:
        response = app.openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {
                    "role": "user",
                    "content": "What is the closest expression of this text, choose between happy, sad, "
                               "indifferent, fear, anger, surprise, or disgust, output json with key "
                               f"'expression':\n\n{text}\n\n"
                }
            ],
            response_format={"type": "json_object"}
        )
        response_message = response.choices[0].message.content
        return json.loads(response_message)
    except Exception:
        return {"expression": "indifferent"}

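
# Illustrative calls for the two routes above (sketches; host, port and file names are
# examples based on the defaults in this file):
#
#   # speech-to-text: multipart upload of an audio file
#   curl -F "audio=@question.wav" http://localhost:8348/speech
#
#   # text-to-speech: synthesizes an mp3 under files/ and returns it
#   curl -F "text=Selamat pagi" http://localhost:8348/speech
#
#   # expression classification for a piece of text
#   curl -F "text=I am so happy today" http://localhost:8348/expression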
{"expression": "indifferent"} # Press the green button in the gutter to run the script. if __name__ == '__main__': app.run(host='0.0.0.0', port=8348, debug=True, ssl_context=ssl) # See PyCharm help at https://www.jetbrains.com/help/pycharm/