@@ -72,9 +72,15 @@ def recommend(headlines, category):
             response_format={"type": "json_object"}
         )
         return json.loads(json_response.choices[0].message.content)
+    except openai.APITimeoutError as e:
+        app.logger.exception("error")
+        result = {"status": "error", "message": e.message}, 408
+    except openai.NotFoundError as e:
+        app.logger.exception("error")
+        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
     except Exception as error_print:
         app.logger.exception("error")
-        result = {}, 405
+        result = {"status": "error", "message": "Please try again"}, 405


 def vision(message, image_url=None, image_b64=None):
@@ -104,9 +110,15 @@ def vision(message, image_url=None, image_b64=None):
             max_tokens=500
         )
         return {"role": "assistant", "content": json_response.choices[0].message.content}
+    except openai.APITimeoutError as e:
+        app.logger.exception("error")
+        result = {"status": "error", "message": e.message}, 408
+    except openai.NotFoundError as e:
+        app.logger.exception("error")
+        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
     except Exception as error_print:
         app.logger.exception("error")
-        result = {}, 405
+        result = {"status": "error", "message": "Please try again"}, 405


 @app.route('/gpt', methods=['POST'])
@@ -291,15 +303,15 @@ def gpt():
         elif isinstance(json_response_dict, str):
             json_response_dict = [json_response_dict]
         result["predict_q"] = json_response_dict
-    except openai.APITimeoutError as error_print:
+    except openai.APITimeoutError as e:
         app.logger.exception("error")
-        result = {"status": "error", "message": error_print.message}, 408
-    except openai.NotFoundError as error_print:
+        result = {"status": "error", "message": e.message}, 408
+    except openai.NotFoundError as e:
         app.logger.exception("error")
-        result = {"status": "error", "message": error_print.message}, error_print.status_code
+        result = {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
     except Exception:
         app.logger.exception("error")
-        result = {}, 405
+        result = {"status": "error", "message": "Please try again"}, 405
     return result
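The hunks above all converge on the same error-handling shape: openai.APITimeoutError maps to a 408 with the SDK's own message, openai.NotFoundError surfaces the server-supplied message parsed out of the response body along with its status code, and anything else falls back to a generic 405 payload that Flask turns into a JSON response. A minimal standalone sketch of that pattern follows; it assumes the openai>=1.x Python SDK, and client, call_chat, and the model name are placeholders rather than names from the patched application.

# Illustrative sketch only (not part of the patch): the (payload, status) pattern above.
# Assumes the openai>=1.x Python SDK; client, call_chat and the model name are placeholders.
import json

import openai
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment


def call_chat(messages):
    try:
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=messages,
            response_format={"type": "json_object"},
        )
        return json.loads(response.choices[0].message.content), 200
    except openai.APITimeoutError as e:
        # A timeout has no HTTP response body to parse; the SDK's own message is all there is.
        return {"status": "error", "message": e.message}, 408
    except openai.NotFoundError as e:
        # 404s (e.g. an unknown model name) do carry a JSON body; surface its message.
        return {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
    except Exception:
        return {"status": "error", "message": "Please try again"}, 405
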
@@ -363,8 +375,6 @@ def assistant_create():
     model_name = "gpt-3.5-turbo"
     assistant_name = "Assistant"
     assistant_ins = "Please respond professionally and in a friendly manner, using the same language as the original request."
-    vector_store_id = ""
-    file_batch_id = ""
     if request.is_json:
         request_form = request.json
     else:
@@ -372,15 +382,21 @@ def assistant_create():
     assistant_name = request_form.pop('name', assistant_name)
     assistant_ins = request_form.pop('instructions', assistant_ins)
     model_name = request_form.pop('model_name', model_name)
+    vector_store_id = request_form.pop('vector_store_id', "")
+    file_batch_id = ""
+    tool_resources = {"tool_resources": {"file_search": {"vector_store_ids": [vector_store_id]}}} \
+        if vector_store_id \
+        else {}
     try:
         assistant = app.openai_client.beta.assistants.create(
             name=assistant_name,
             instructions=assistant_ins,
             model=model_name,
             tools=[{"type": "file_search"}],
+            **tool_resources,
             **request_form
         )
-        if 'attachment1' in request.files:
+        if 'attachment1' in request.files and not vector_store_id:
             resp_att = assistant_att()
             retval = {}
             if resp_att['status'] == 'completed':
@@ -397,12 +413,17 @@ def assistant_create():
                 retval['assistant_updated'] = assistant_updated
             return retval
         else:
-            return {"status": "ok", "assistant_id": assistant.id, "assistant_updated": "0"}
+            return {"status": "ok", "assistant_id": assistant.id, "assistant_updated": "1" if vector_store_id else "0"}
     except ValueError as e:
+        app.logger.exception("error")
         return {"status": "error",
                 "message": "Failed to create assistant, please check whether your parameters are correct"}
+    except openai.NotFoundError as e:
+        app.logger.exception("error")
+        return {"status": "error", "message": json.loads(e.response.content)['error']['message']}, e.status_code
     except Exception:
-        return {"status": "error", "message": "Failed to create assistant, please try again"}
+        app.logger.exception("error")
+        return {"status": "error", "message": "Failed to create assistant, please try again"}, 405


 @app.route('/assistant/attachment', methods=['POST'])
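Taken together, the assistant_create hunks add one capability: the route now accepts an optional vector_store_id and, when it is present, wires the new assistant to that existing vector store through tool_resources instead of walking the attachment-upload path. A minimal sketch of that call shape follows; it assumes the openai>=1.x SDK's beta Assistants API, and client, create_assistant, and the vs_example id are placeholders.

# Illustrative sketch only (not part of the patch): reusing an existing vector store
# at assistant-creation time. Assumes the openai>=1.x SDK's beta Assistants API;
# client, create_assistant and vs_example are placeholders.
from openai import OpenAI

client = OpenAI()


def create_assistant(vector_store_id=""):
    # Only pass tool_resources when an existing store id was supplied.
    extra = (
        {"tool_resources": {"file_search": {"vector_store_ids": [vector_store_id]}}}
        if vector_store_id
        else {}
    )
    return client.beta.assistants.create(
        name="Assistant",
        instructions="Please respond professionally and in a friendly manner.",
        model="gpt-3.5-turbo",
        tools=[{"type": "file_search"}],
        **extra,
    )


# create_assistant("vs_example") skips any attachment upload and file-batch polling,
# which is why the route above can report assistant_updated as "1" immediately.
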
@@ -514,9 +535,9 @@ def llama():
             }
         else:
             result = {}, r.status_code
-    except Exception as error_print:
+    except Exception as e:
         app.logger.exception("error")
-        result = {}, 405
+        result = {"status": "error", "message": "Please try again"}, 405
     return result