@@ -6,6 +6,7 @@ import re
 import sys
 import traceback
 
+import openai
 import requests
 from openai import OpenAI
 from flask import Flask, request, jsonify, send_from_directory, url_for
@@ -75,7 +76,7 @@ def recommend(headlines, category):
         )
         return json.loads(json_response.choices[0].message.content)
     except Exception as error_print:
-        app.logger.error(error_print)
+        app.logger.exception("error")
         result = {}, 405
 
 
@@ -107,7 +108,7 @@ def vision(message, image_url=None, image_b64=None):
         )
         return {"role": "assistant", "content": json_response.choices[0].message.content}
     except Exception as error_print:
-        app.logger.error(error_print)
+        app.logger.exception("error")
         result = {}, 405
 
 
@@ -293,8 +294,14 @@ def gpt():
         elif isinstance(json_response_dict, str):
             json_response_dict = [json_response_dict]
         result["predict_q"] = json_response_dict
+    except openai.APITimeoutError as error_print:
+        app.logger.exception("error")
+        result = {"status": "error", "message": error_print.message}, 408
+    except openai.NotFoundError as error_print:
+        app.logger.exception("error")
+        result = {"status": "error", "message": error_print.message}, error_print.status_code
     except Exception as error_print:
-        app.logger.error(error_print)
+        app.logger.exception("error")
         result = {}, 405
     return result
 
@@ -350,7 +357,7 @@ def train_with_id(job_id):
         app.logger.info({"job_id": job_id, "status": job.status, "model_name": job.fine_tuned_model})
         return {"status": job.status, "model_name": job.fine_tuned_model}
     except Exception as error_print:
-        app.logger.error(error_print)
+        app.logger.exception("error")
         return {"status": "Could not find job from id"}
 
 
@@ -492,7 +499,7 @@ def llama():
         else:
             result = {}, r.status_code
     except Exception as error_print:
-        app.logger.error(error_print)
+        app.logger.exception("error")
         result = {}, 405
     return result
 
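Every hunk applies the same fix: app.logger.exception("error") records the full traceback (app.logger.error(error_print) only logged the exception's string form), and the gpt() route additionally maps two OpenAI client errors to HTTP status codes. Below is a minimal sketch of that handler shape; the route name, client variable, and model name are illustrative assumptions, not taken from this patch.

    import openai
    from flask import Flask

    app = Flask(__name__)
    client = openai.OpenAI()  # assumed client; the patched app already constructs one

    @app.route("/gpt-sketch", methods=["POST"])
    def gpt_sketch():
        try:
            response = client.chat.completions.create(
                model="gpt-4o-mini",  # placeholder model name
                messages=[{"role": "user", "content": "ping"}],
            )
            result = {"predict_q": response.choices[0].message.content}, 200
        except openai.APITimeoutError as error_print:
            app.logger.exception("error")  # logs the message plus the traceback
            result = {"status": "error", "message": error_print.message}, 408
        except openai.NotFoundError as error_print:
            app.logger.exception("error")
            result = {"status": "error", "message": error_print.message}, error_print.status_code
        except Exception:
            app.logger.exception("error")
            result = {}, 405
        return result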