support api debug

pull/469/head
Jingyi 2024-01-26 11:16:46 +08:00
parent 270d565f5e
commit 533728efa8
4 changed files with 87 additions and 25 deletions

View File

@ -1,6 +1,5 @@
import logging
import gradio
import markdown
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
@ -8,26 +7,60 @@ logger.setLevel(logging.INFO)
class ApiLogger:
    """Markdown + HTML request log for a single inference job.

    Each logged request is appended as a markdown section to
    ``outputs/{infer_id}.md``; after every request the whole markdown file is
    re-rendered to ``outputs/{infer_id}.html`` so the HTML view stays current.
    """

    action = ""
    file_path = ""
    infer_id = ""

    def __init__(self, action: str, append: bool = False, infer_id: str = ""):
        """Open the markdown log file for this inference job.

        :param action: logical API action name (e.g. ``'inference'``).
        :param append: when ``False``, start a fresh log with a title header;
            when ``True``, append to the existing log.
        :param infer_id: inference job id; names the output files.
        """
        self.action = action
        self.infer_id = infer_id
        self.file_path = f'outputs/{infer_id}.md'
        self.file_path_html = f'outputs/{infer_id}.html'
        if append is False:
            self.file = open(self.file_path, 'w')
            self.file.write(f"# Inference Job API Request Process - {infer_id}\n")
        else:
            self.file = open(self.file_path, 'a')

    def _write_section(self, title: str, body) -> None:
        """Write one ``#### title:`` heading followed by a fenced code block."""
        self.file.write(f"#### {title}:\n")
        self.file.write("\n")
        self.file.write("```\n")
        self.file.write(f"{body}\n")
        self.file.write("```\n")
        self.file.write("\n")

    def req_log(self, sub_action: str, method: str, path: str, headers=None, data=None, params=None, response=None):
        """Append one request/response record and regenerate the HTML view.

        :param sub_action: human-readable name of the API call being logged.
        :param method: HTTP method (``GET``, ``POST``, ``PUT`` ...).
        :param path: request URL or path, written next to the method.
        :param headers: optional request headers; the ``x-api-key`` value is
            redacted before logging. The caller's dict is not modified.
        :param data: optional request body.
        :param params: optional query parameters.
        :param response: optional ``requests``-style response; its ``.json()``
            payload is logged.
        """
        self.file.write(f"## {sub_action}\n")
        self.file.write("\n")
        self.file.write(f"#### {method} {path}\n")
        self.file.write("\n")
        if headers:
            # Redact the API key on a copy so the caller's dict keeps the
            # real key for subsequent requests.
            headers = dict(headers)
            headers['x-api-key'] = '***'
            self._write_section('headers', headers)
        if data:
            self._write_section('data', data)
        if params:
            self._write_section('params', params)
        if response:
            self._write_section('response', response.json())
        # Flush buffered writes so the re-read below sees everything logged
        # so far; otherwise the rendered HTML lags behind the markdown.
        self.file.flush()
        with open(self.file_path, 'r') as file:
            file_content = file.read()
        html = markdown.markdown(file_content)
        with open(self.file_path_html, 'w') as html_file:
            html_file.write(html)

View File

@ -44,10 +44,12 @@ class SimpleSagemakerInfer(InferManager):
inference_id = None
headers = {'x-api-key': api_key}
response = requests.post(f'{url}inferences', json=payload, headers=headers)
infer_id = ""
if 'data' in response.json():
infer_id = response.json()['data']['inference']['id']
api_logger = ApiLogger(
action='inference',
username=userid
infer_id=infer_id
)
api_logger.req_log(sub_action="CreateInference",
method='POST',
@ -62,12 +64,12 @@ class SimpleSagemakerInfer(InferManager):
upload_param_response = response.json()['data']
if 'inference' in upload_param_response and \
'api_params_s3_upload_url' in upload_param_response['inference']:
upload_s3_resp = requests.put(upload_param_response['inference']['api_params_s3_upload_url'],
data=sd_api_param_json)
api_params_s3_upload_url = upload_param_response['inference']['api_params_s3_upload_url']
upload_s3_resp = requests.put(api_params_s3_upload_url, data=sd_api_param_json)
upload_s3_resp.raise_for_status()
api_logger.req_log(sub_action="UploadParameter",
api_logger.req_log(sub_action="UploadParameterToS3",
method='PUT',
path=f's3_presign_url',
path=api_params_s3_upload_url,
data=sd_api_param_json)
inference_id = upload_param_response['inference']['id']
# start run infer

View File

@ -293,22 +293,22 @@ def get_inference_job(inference_job_id):
url = f'inferences/{inference_job_id}'
response = server_request(url)
logger.debug(f"get_inference_job response {response}")
username = ""
infer_id = ""
if 'data' in response.json():
if 'owner_group_or_role' in response.json()['data']:
username = response.json()['data']['owner_group_or_role'][0]
infer_id = response.json()['data']['InferenceJobId']
api_logger = ApiLogger(
action='inference',
append=True,
username=username
infer_id=infer_id
)
headers = {
"x-api-key": get_variable_from_json('api_token'),
"Content-Type": "application/json"
}
api_gateway_url = get_variable_from_json('api_gateway_url')
api_logger.req_log(sub_action="GetInferenceJob",
method='GET',
path=url,
path=f"{api_gateway_url}{url}",
headers=headers,
response=response)
return response.json()['data']
@ -1097,6 +1097,12 @@ def delete_inference_job(selected_value):
if resp.status_code != 204:
gr.Error(f"Error deleting inference: {resp.json()['message']}")
gr.Info(f"{inference_job_id} deleted successfully")
file_path = f"{os.getcwd()}/outputs/{inference_job_id}.md"
if os.path.exists(file_path):
os.remove(file_path)
file_path = f"{os.getcwd()}/outputs/{inference_job_id}.html"
if os.path.exists(file_path):
os.remove(file_path)
else:
gr.Warning('Please select a inference job to delete')
@ -1314,6 +1320,15 @@ def create_ui(is_img2img):
outputs=[]
)
api_inference_job_button = ToolButton(value='API', elem_id="api_inference_job")
api_inference_job_cwd = ToolButton(value=os.getcwd(), elem_id="api_inference_job_path", visible=False)
api_inference_job_button.click(
_js="download_inference_job_api_call",
fn=None,
inputs=[api_inference_job_cwd, inference_job_dropdown],
outputs=[]
)
with gr.Row():
def setup_inference_for_plugin(pr: gr.Request):

View File

@ -43,6 +43,18 @@ function delete_inference_job_confirm(inference_job_dropdown) {
return ["cancelled"];
}
function download_inference_job_api_call(cwd, inference_job_dropdown) {
    // Dropdown values are "-->"-separated; the inference id is the 4th part
    // (index 3), so a valid selection must split into at least 4 parts.
    // The previous guard checked `< 3`, which let a 3-part value through and
    // opened ".../undefined.html".
    let inference = inference_job_dropdown.split("-->");
    if (inference.length < 4) {
        alert("Please select a valid inference job.")
        return;
    }
    let file = cwd + "/outputs/" + inference[3] + '.html';
    // Gradio serves local files through the /file= route.
    let file_url = window.location.origin + '/file=' + file;
    window.open(file_url);
}
function delete_dataset_confirm(dataset_name) {
res = confirm("You are about to delete dataset. Do you want to continue?");
if (res === true) {