@@ -1,10 +1,7 @@
-import io
 import logging
 import os
 import json
 import re
-import sys
-import traceback
 
 import openai
 import requests
@@ -300,7 +297,7 @@ def gpt():
     except openai.NotFoundError as error_print:
         app.logger.exception("error")
         result = {"status": "error", "message": error_print.message}, error_print.status_code
-    except Exception as error_print:
+    except Exception:
         app.logger.exception("error")
         result = {}, 405
     return result
@@ -366,7 +363,8 @@ def assistant_create():
     model_name = "gpt-3.5-turbo"
     assistant_name = "Assistant"
     assistant_ins = "Please respond professionally and in a friendly manner, using the same language as the original request."
-    assistant = None
+    vector_store_id = ""
+    file_batch_id = ""
     if request.is_json:
         request_form = request.json
     else:
@@ -384,12 +382,20 @@ def assistant_create():
         )
         if 'attachment1' in request.files:
             resp_att = assistant_att()
+            retval = {}
             if resp_att['status'] == 'completed':
                 resp_upd = assistant_update(assistant.id, resp_att['vector_store_id'])
                 assistant_updated = "1" if resp_upd['status'] == 'ok' else "0"
             else:
                 assistant_updated = "0"
-            return {"status": "ok", "assistant_id": assistant.id, "assistant_updated": assistant_updated}
+            if 'vector_store_id' in resp_att:
+                retval['vector_store_id'] = resp_att['vector_store_id']
+            if 'file_batch_id' in resp_att:
+                retval['file_batch_id'] = resp_att['file_batch_id']
+            retval['status'] = "ok"
+            retval['assistant_id'] = assistant.id
+            retval['assistant_updated'] = assistant_updated
+            return retval
         else:
             return {"status": "ok", "assistant_id": assistant.id, "assistant_updated": "0"}
     except ValueError as e:
@@ -426,7 +432,12 @@ def assistant_att():
         if vector_store_id:
             vector_store = app.openai_client.beta.vector_stores.retrieve(vector_store_id=vector_store_id)
         else:
-            vector_store = app.openai_client.beta.vector_stores.create()
+            vector_store = app.openai_client.beta.vector_stores.create(
+                expires_after={
+                    "anchor": "last_active_at",
+                    "days": 365
+                }
+            )
         file_batch = app.openai_client.beta.vector_stores.file_batches.create_and_poll(
             vector_store_id=vector_store.id,
             file_ids=attachments
@@ -443,16 +454,21 @@ def assistant_att():
 @app.route('/assistant/update', methods=['POST'])
 def assistant_update(aid=None, vid=None):
     try:
+        request_form = request.form.copy()
         if aid is not None and vid is not None:
             assistant_id = aid
             vector_store_id = vid
         else:
-            assistant_id = request.form['assistant_id']
-            vector_store_id = request.form['vector_store_id']
-        app.openai_client.beta.assistants.update(
-            assistant_id=assistant_id,
-            tool_resources={"file_search": {"vector_store_ids": [vector_store_id]}},
-        )
+            assistant_id = request_form.pop('assistant_id')
+            vector_store_id = request_form.pop('vector_store_id', None)
+        kwargs = {"assistant_id": assistant_id}
+        if vector_store_id is not None:
+            kwargs['tool_resources'] = {"file_search": {"vector_store_ids": [vector_store_id]}}
+        if 'name' in request_form:
+            kwargs['name'] = request_form.pop('name')
+        if 'instructions' in request_form:
+            kwargs['instructions'] = request_form.pop('instructions')
+        app.openai_client.beta.assistants.update(**kwargs)
         return {"status": "ok"}
     except Exception as e:
         app.logger.exception("error")