redesign test
|
|
@ -1,217 +0,0 @@
|
|||
import os
|
||||
import time
|
||||
import base64
|
||||
import uuid
|
||||
import json
|
||||
import io
|
||||
from PIL import Image
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
import boto3
|
||||
import sagemaker
|
||||
from sagemaker.predictor import Predictor
|
||||
from sagemaker.predictor_async import AsyncPredictor
|
||||
from sagemaker.serializers import JSONSerializer
|
||||
from sagemaker.deserializers import JSONDeserializer
|
||||
|
||||
def create_model(name, image_url, model_data_url):
    """Create a SageMaker model resource.

    Args:
        name (string): Name to label the model with.
        image_url (string): Registry path of the Docker image that contains the model algorithm.
        model_data_url (string): S3 URL of the model artifacts to download into the container.

    Returns:
        (None)

    Raises:
        Exception: Propagates any error from the SageMaker API call.
    """
    try:
        # `sagemaker` (boto3 client) and EXECUTION_ROLE are module-level names
        # defined further down this script; they are resolved at call time.
        sagemaker.create_model(
            ModelName=name,
            PrimaryContainer={
                'Image': image_url,
                'ModelDataUrl': model_data_url
            },
            ExecutionRoleArn=EXECUTION_ROLE
        )
    except Exception as e:
        print(e)
        print('Unable to create model.')
        raise  # bare raise preserves the original traceback (raise(e) re-raises from here)
|
||||
|
||||
def create_endpoint_config(endpoint_config_name, s3_output_path, model_name, initial_instance_count, instance_type):
    """Create a SageMaker async-inference endpoint configuration.

    Args:
        endpoint_config_name (string): Name to label the endpoint configuration with.
        s3_output_path (string): S3 location to upload inference responses to.
        model_name (string): The name of the model to host.
        initial_instance_count (integer): Number of instances to launch initially.
        instance_type (string): The ML compute instance type.

    Returns:
        (None)

    Raises:
        Exception: Propagates any error from the SageMaker API call.
    """
    try:
        sagemaker.create_endpoint_config(
            EndpointConfigName=endpoint_config_name,
            AsyncInferenceConfig={
                "OutputConfig": {
                    "S3OutputPath": s3_output_path,
                    # SNS notifications intentionally disabled for this test;
                    # re-enable once the success/error topics are provisioned.
                    # "NotificationConfig": {
                    #     "SuccessTopic": ASYNC_SUCCESS_TOPIC,
                    #     "ErrorTopic": ASYNC_ERROR_TOPIC
                    # }
                }
            },
            ProductionVariants=[
                {
                    'VariantName': 'prod',
                    'ModelName': model_name,
                    'InitialInstanceCount': initial_instance_count,
                    'InstanceType': instance_type
                }
            ]
        )
    except Exception as e:
        print(e)
        print('Unable to create endpoint configuration.')
        raise  # bare raise preserves the original traceback
|
||||
|
||||
def create_endpoint(endpoint_name, config_name):
    """Create a SageMaker endpoint from an existing endpoint configuration.

    Args:
        endpoint_name (string): Name of the endpoint to create.
        config_name (string): Name of the endpoint configuration to create the endpoint with.

    Returns:
        (None)

    Raises:
        Exception: Propagates any error from the SageMaker API call.
    """
    try:
        sagemaker.create_endpoint(
            EndpointName=endpoint_name,
            EndpointConfigName=config_name
        )
    except Exception as e:
        print(e)
        print('Unable to create endpoint.')
        raise  # bare raise preserves the original traceback
|
||||
|
||||
def describe_endpoint(name):
    """Describe the SageMaker endpoint identified by name.

    Args:
        name (string): Name of the SageMaker endpoint to describe.

    Returns:
        (dict): Full DescribeEndpoint API response, including metadata and
        status details (callers must extract fields such as 'EndpointStatus').

    Raises:
        Exception: Propagates any error from the SageMaker API call.
    """
    try:
        response = sagemaker.describe_endpoint(
            EndpointName=name
        )
    except Exception as e:
        print(e)
        print('Unable to describe endpoint.')
        raise  # bare raise preserves the original traceback
    return response
|
||||
|
||||
|
||||
sagemaker = boto3.client('sagemaker')
|
||||
s3_resource = boto3.resource('s3')
|
||||
s3_client = boto3.client('s3')
|
||||
|
||||
load_dotenv()
|
||||
|
||||
EXECUTION_ROLE = os.environ['Role']
|
||||
# ASYNC_SUCCESS_TOPIC = os.environ["SNS_INFERENCE_SUCCESS"]
|
||||
# ASYNC_ERROR_TOPIC = os.environ["SNS_INFERENCE_ERROR"]
|
||||
S3_BUCKET_NAME = os.environ.get('S3_BUCKET_NAME')
|
||||
INFERENCE_ECR_IMAGE_URL = os.environ.get("INFERENCE_ECR_IMAGE_URL")
|
||||
|
||||
# deploy endpoint
|
||||
print(f"start deploy endpoint")
|
||||
|
||||
endpoint_deployment_id = "test-private-no-sd15-model"
|
||||
sagemaker_model_name = f"infer-model-{endpoint_deployment_id}"
|
||||
sagemaker_endpoint_config = f"infer-config-{endpoint_deployment_id}"
|
||||
sagemaker_endpoint_name = f"infer-endpoint-{endpoint_deployment_id}"
|
||||
|
||||
image_url = INFERENCE_ECR_IMAGE_URL
|
||||
model_data_url = f"s3://{S3_BUCKET_NAME}/data/model.tar.gz"
|
||||
|
||||
s3_output_path = f"s3://{S3_BUCKET_NAME}/sagemaker_output/"
|
||||
|
||||
initial_instance_count = 1
|
||||
instance_type = "ml.g5.2xlarge"
|
||||
|
||||
print('Creating model resource ...')
|
||||
|
||||
create_model(sagemaker_model_name, image_url, model_data_url)
|
||||
print('Creating endpoint configuration...')
|
||||
|
||||
create_endpoint_config(sagemaker_endpoint_config, s3_output_path, sagemaker_model_name, initial_instance_count, instance_type)
|
||||
print('There is no existing endpoint for this model. Creating new model endpoint...')
|
||||
|
||||
create_endpoint(sagemaker_endpoint_name, sagemaker_endpoint_config)
|
||||
|
||||
# wait until the endpoint is deployed
status = "Creating"

while status != "InService":
    # BUG FIX: describe_endpoint returns the full API response dict; comparing
    # that dict against "InService" could never succeed, so the loop spun
    # forever. Extract the 'EndpointStatus' field instead.
    status = describe_endpoint(sagemaker_endpoint_name)["EndpointStatus"]
    if status in ("Failed", "RollingBack"):
        raise Exception(f"Error! endpoint in status {status}")
    print("Creating endpoint...")
    time.sleep(180)  # endpoint creation typically takes several minutes
|
||||
|
||||
# test current endpoint
|
||||
predictor = Predictor(sagemaker_endpoint_name)
|
||||
|
||||
# adjust time out time to 1 hour
|
||||
initial_args = {}
|
||||
|
||||
initial_args["InvocationTimeoutSeconds"]=3600
|
||||
|
||||
def get_uuid():
    """Return a freshly generated random UUID (version 4) as a string."""
    return str(uuid.uuid4())
|
||||
|
||||
predictor = AsyncPredictor(predictor, name=sagemaker_endpoint_name)
|
||||
predictor.serializer = JSONSerializer()
|
||||
predictor.deserializer = JSONDeserializer()
|
||||
|
||||
#paylod 1 for test
|
||||
|
||||
inference_id = get_uuid()
|
||||
prediction = predictor.predict_async(data=payload, initial_args=initial_args, inference_id=inference_id)
|
||||
output_location = prediction.output_path
|
||||
|
||||
def get_bucket_and_key(s3uri):
    """Split an S3 URI of the form ``s3://bucket/key`` into (bucket, key).

    Args:
        s3uri (string): URI beginning with the 5-character ``s3://`` scheme prefix.

    Returns:
        (tuple): ``(bucket, key)``. If the URI has no key portion, the key is ''
        (the previous ``find``-based version silently corrupted the result in
        that case: it dropped the bucket's last character and returned the
        whole URI as the key).
    """
    bucket, _, key = s3uri[5:].partition('/')
    return bucket, key
|
||||
|
||||
bucket, key = get_bucket_and_key(output_location)
|
||||
obj = s3_resource.Object(bucket, key)
|
||||
body = obj.get()['Body'].read().decode('utf-8')
|
||||
json_body = json.loads(body)
|
||||
|
||||
def decode_base64_to_image(encoding):
    """Decode a base64 string (optionally a ``data:image/...`` URI) into a PIL Image."""
    if encoding.startswith("data:image/"):
        # Strip the "data:image/<fmt>;base64," prefix, keeping only the payload.
        after_scheme = encoding.split(";")[1]
        encoding = after_scheme.split(",")[1]
    raw = base64.b64decode(encoding)
    return Image.open(io.BytesIO(raw))
|
||||
|
||||
# save images
|
||||
for count, b64image in enumerate(json_body["images"]):
|
||||
image = decode_base64_to_image(b64image).convert("RGB")
|
||||
output = io.BytesIO()
|
||||
image.save(output, format="JPEG")
|
||||
# Upload the image to the S3 bucket
|
||||
s3_client.put_object(
|
||||
Body=output.getvalue(),
|
||||
Bucket=S3_BUCKET_NAME,
|
||||
Key=f"out/{inference_id}/result/image_{count}.jpg"
|
||||
)
|
||||
|
||||
# test for payload
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1 +0,0 @@
|
|||
# sys.path.append("../../Solution-api-test-framework")
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
import json
|
||||
import requests
|
||||
import io
|
||||
import base64
|
||||
from PIL import Image, PngImagePlugin
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
sys.path.append("extensions/stable-diffusion-aws-extension/middleware_api/inference")
|
||||
from parse.parameter_parser import json_convert_to_payload
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
# prepare payload
|
||||
task_type = 'img2img'
|
||||
payload_checkpoint_info = json.loads(os.environ['checkpoint_info'])
|
||||
|
||||
f = open("extensions/stable-diffusion-aws-extension/test/api_test/json_files/aigc.json")
|
||||
|
||||
params_dict = json.load(f)
|
||||
|
||||
payload = json_convert_to_payload(params_dict, payload_checkpoint_info, task_type)
|
||||
|
||||
print(payload.keys())
|
||||
|
||||
# call local api
|
||||
url = "http://127.0.0.1:8080"
|
||||
|
||||
response = requests.post(url=f'{url}/invocations', json=payload)
|
||||
|
||||
print(f"run time is {time.time()-start_time}")
|
||||
|
||||
print(f"response is {response}")
|
||||
|
||||
r = response.json()
|
||||
|
||||
id = 0
|
||||
for i in r['images']:
|
||||
image = Image.open(io.BytesIO(base64.b64decode(i.split(",",1)[0])))
|
||||
|
||||
png_payload = {
|
||||
"image": "data:image/png;base64," + i
|
||||
}
|
||||
response2 = requests.post(url=f'{url}/sdapi/v1/png-info', json=png_payload)
|
||||
|
||||
pnginfo = PngImagePlugin.PngInfo()
|
||||
pnginfo.add_text("parameters", response2.json().get("info"))
|
||||
image.save('output_%d.png'%id, pnginfo=pnginfo)
|
||||
id += 1
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
import json
|
||||
import boto3
|
||||
|
||||
s3_resource = boto3.resource('s3')
|
||||
|
||||
def get_bucket_and_key(s3uri):
    """Split an S3 URI of the form ``s3://bucket/key`` into (bucket, key).

    Args:
        s3uri (string): URI beginning with the 5-character ``s3://`` scheme prefix.

    Returns:
        (tuple): ``(bucket, key)``. If the URI has no key portion, the key is ''
        (the previous ``find``-based version silently corrupted the result in
        that case: it dropped the bucket's last character and returned the
        whole URI as the key).
    """
    bucket, _, key = s3uri[5:].partition('/')
    return bucket, key
|
||||
|
||||
output_location = "s3://stable-diffusion-aws-extension-aigcbucketa457cb49-1r9svjhqjplic/sagemaker_output/ef218ab7-098c-4231-b14e-3edafad67ebb.out"
|
||||
|
||||
bucket, key = get_bucket_and_key(output_location)
|
||||
obj = s3_resource.Object(bucket, key)
|
||||
body = obj.get()['Body'].read().decode('utf-8')
|
||||
json_body = json.loads(body)
|
||||
|
||||
print(f"caption is {type(json_body['caption'])}")
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
import json
|
||||
import requests
|
||||
import base64
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
sys.path.append("../../../middleware_api/inference")
|
||||
from parse.parameter_parser import json_convert_to_payload
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
# prepare payload
|
||||
task_type = ''
|
||||
payload_checkpoint_info = json.loads(os.environ['checkpoint_info'])
|
||||
|
||||
f = open("../json_files/aigc.json")
|
||||
|
||||
params_dict = json.load(f)
|
||||
|
||||
payload = json_convert_to_payload(params_dict, payload_checkpoint_info, task_type)
|
||||
|
||||
print(payload.keys())
|
||||
|
||||
# # call local api
|
||||
# url = "http://localhost:8082"
|
||||
|
||||
# print("docker api test for clip:")
|
||||
|
||||
# with open("test.png", "rb") as img:
|
||||
# test_img = str(base64.b64encode(img.read()), 'utf-8')
|
||||
|
||||
# payload = {
|
||||
# "task": "interrogate_clip",
|
||||
# "interrogate_payload": {
|
||||
# "image":test_img,
|
||||
# "model":"clip"
|
||||
# }
|
||||
# }
|
||||
|
||||
# #
|
||||
# response = requests.post(url=f'{url}/invocations', json=payload)
|
||||
|
||||
# print(f"run time is {time.time()-start_time}")
|
||||
|
||||
# r = response.json()
|
||||
|
||||
# prompt_message = r["caption"]
|
||||
|
||||
# print(f"prompt message : {prompt_message}")
|
||||
|
||||
# print("docker api test for deepbooru:")
|
||||
|
||||
# payload = {
|
||||
# "task": "interrogate_deepbooru",
|
||||
# "interrogate_payload": {
|
||||
# "image":test_img,
|
||||
# "model":"deepdanbooru"
|
||||
# }
|
||||
# }
|
||||
|
||||
# #
|
||||
# response = requests.post(url=f'{url}/invocations', json=payload)
|
||||
|
||||
# print(f"run time is {time.time()-start_time}")
|
||||
|
||||
# r = response.json()
|
||||
|
||||
# prompt_message = r["caption"]
|
||||
|
||||
# print(f"prompt message : {prompt_message}")
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
import json
|
||||
import requests
|
||||
import io
|
||||
import base64
|
||||
from PIL import Image, PngImagePlugin
|
||||
import time
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
url = "http://127.0.0.1:7860"
|
||||
|
||||
print("webui api test for clip:")
|
||||
|
||||
with open("test.png", "rb") as img:
|
||||
test_img = str(base64.b64encode(img.read()), 'utf-8')
|
||||
|
||||
payload = {
|
||||
"image":test_img,
|
||||
"model":"clip"
|
||||
}
|
||||
|
||||
#
|
||||
response = requests.post(url=f'{url}/sdapi/v1/interrogate', json=payload)
|
||||
|
||||
print(f"run time is {time.time()-start_time}")
|
||||
|
||||
# print(f"response is {response}")
|
||||
|
||||
r = response.json()
|
||||
|
||||
prompt_message = r["caption"]
|
||||
|
||||
print(f"prompt message : {prompt_message}")
|
||||
|
||||
print("webui api test for deepbooru:")
|
||||
|
||||
payload = {
|
||||
"image":test_img,
|
||||
"model":"deepdanbooru"
|
||||
}
|
||||
|
||||
#
|
||||
response = requests.post(url=f'{url}/sdapi/v1/interrogate', json=payload)
|
||||
|
||||
print(f"run time is {time.time()-start_time}")
|
||||
|
||||
r = response.json()
|
||||
|
||||
prompt_message = r["caption"]
|
||||
|
||||
print(f"prompt message : {prompt_message}")
|
||||
|
Before Width: | Height: | Size: 130 KiB |
|
|
@ -1,116 +0,0 @@
|
|||
{
|
||||
"img2img_init_img": null,
|
||||
"img2img_sketch": null,
|
||||
"img2img_init_img_with_mask": null,
|
||||
"img2img_inpaint_color_sketch": null,
|
||||
"img2img_init_img_inpaint": null,
|
||||
"img2img_init_mask_inpaint": [],
|
||||
"script_txt2txt_xyz_plot_x_values": "",
|
||||
"script_txt2txt_xyz_plot_y_values": "",
|
||||
"script_txt2txt_xyz_plot_z_values": "",
|
||||
"script_txt2txt_prompt_matrix_different_seeds": false,
|
||||
"script_txt2txt_prompt_matrix_margin_size": "0",
|
||||
"script_txt2txt_prompt_matrix_put_at_start": false,
|
||||
"script_txt2txt_checkbox_iterate_every_line": false,
|
||||
"script_txt2txt_checkbox_iterate_all_lines": false,
|
||||
"script_txt2txt_xyz_plot_draw_legend": true,
|
||||
"script_txt2txt_xyz_plot_include_lone_images": false,
|
||||
"script_txt2txt_xyz_plot_include_sub_grids": false,
|
||||
"script_txt2txt_xyz_plot_margin_size": "0",
|
||||
"script_txt2txt_xyz_plot_no_fixed_seeds": false,
|
||||
"txt2img_batch_count": "1",
|
||||
"txt2img_batch_size": "1",
|
||||
"txt2img_cfg_scale": "7",
|
||||
"txt2img_denoising_strength": "0.7",
|
||||
"txt2img_enable_hr": false,
|
||||
"txt2img_height": "512",
|
||||
"txt2img_hires_steps": "0",
|
||||
"txt2img_hr_resize_x": "0",
|
||||
"txt2img_hr_resize_y": "0",
|
||||
"txt2img_hr_scale": "2",
|
||||
"txt2img_restore_faces": false,
|
||||
"txt2img_seed": "-1",
|
||||
"txt2img_seed_resize_from_h": "0",
|
||||
"txt2img_seed_resize_from_w": "0",
|
||||
"txt2img_steps": "20",
|
||||
"txt2img_subseed": "-1",
|
||||
"txt2img_subseed_show": false,
|
||||
"txt2img_subseed_strength": "0",
|
||||
"txt2img_tiling": false,
|
||||
"txt2img_width": "512",
|
||||
"script_list": "None",
|
||||
"script_txt2txt_xyz_plot_x_type": "Seed",
|
||||
"script_txt2txt_xyz_plot_x_value": "",
|
||||
"script_txt2txt_xyz_plot_y_type": "Nothing",
|
||||
"script_txt2txt_xyz_plot_y_value": "",
|
||||
"script_txt2txt_xyz_plot_z_type": "Nothing",
|
||||
"script_txt2txt_xyz_plot_z_value": "",
|
||||
"txt2img_hr_upscaler": "Latent",
|
||||
"txt2img_sampling_method": "Euler a",
|
||||
"txt2img_sampling_steps": "20",
|
||||
"sagemaker_endpoint": "",
|
||||
"sagemaker_stable_diffusion_checkpoint": "",
|
||||
"sagemaker_texual_inversion_model": "",
|
||||
"sagemaker_lora_model": "",
|
||||
"sagemaker_hypernetwork_model": "",
|
||||
"sagemaker_controlnet_model": "",
|
||||
"txt2img_controlnet_ControlNet_input_image": "",
|
||||
"controlnet_enable": false,
|
||||
"controlnet_lowVRAM_enable": false,
|
||||
"controlnet_pixel_perfect": false,
|
||||
"controlnet_allow_preview": false,
|
||||
"controlnet_preprocessor": "",
|
||||
"controlnet_model": "",
|
||||
"controlnet_weight": "",
|
||||
"controlnet_starting_control_step": "",
|
||||
"controlnet_ending_control_step": "",
|
||||
"controlnet_control_mode_balanced": false,
|
||||
"controlnet_control_mode_my_prompt_is_more_important": false,
|
||||
"controlnet_control_mode_controlnet_is_more_important": false,
|
||||
"controlnet_resize_mode_just_resize": false,
|
||||
"controlnet_resize_mode_Crop_and_Resize": false,
|
||||
"controlnet_resize_mode_Resize_and_Fill": false,
|
||||
"controlnet_loopback_automatically": false,
|
||||
"controlnet_preprocessor_resolution": "",
|
||||
"controlnet_canny_low_threshold": "",
|
||||
"controlnet_canny_high_threshold": "",
|
||||
"script_txt2txt_prompt_matrix_prompt_type_positive": true,
|
||||
"script_txt2txt_prompt_matrix_prompt_type_negative": false,
|
||||
"script_txt2txt_prompt_matrix_variations_delimiter_comma": true,
|
||||
"script_txt2txt_prompt_matrix_variations_delimiter_space": false,
|
||||
"script_txt2txt_prompts_from_file_or_textbox_prompt_txt": "",
|
||||
"txt2img_prompt": "",
|
||||
"txt2img_neg_prompt": "",
|
||||
"txt2img_styles": "",
|
||||
"aws_api_gateway_url": "",
|
||||
"aws_api_token": "",
|
||||
"img2img_prompt": "",
|
||||
"img2img_neg_prompt": "",
|
||||
"img2img_resize_mode_just_resize": true,
|
||||
"img2img_resize_mode_crop_and_resize": false,
|
||||
"img2img_resize_mode_resize_and_fill": false,
|
||||
"img2img_resize_mode_just_resize_latent_upscale": false,
|
||||
"img2img_sampling_method": "Euler a",
|
||||
"img2img_sampling_steps": "20",
|
||||
"img2img_restore_faces": false,
|
||||
"img2img_tiling": false,
|
||||
"img2img_width": "512",
|
||||
"img2img_height": "512",
|
||||
"img2img_batch_count": "1",
|
||||
"img2img_batch_size": "1",
|
||||
"img2img_cfg_scale": "7",
|
||||
"img2img_denoising_strength": "0.75",
|
||||
"img2img_seed": "-1",
|
||||
"img2img_subseed_show": false,
|
||||
"img2img_scale": "1",
|
||||
"img2img_mask_blur": "4",
|
||||
"img2img_mask_mode_inpaint_masked": true,
|
||||
"img2img_mask_mode_inpaint_not_masked": false,
|
||||
"img2img_inpainting_fill_fill": false,
|
||||
"img2img_inpainting_fill_original": true,
|
||||
"img2img_inpainting_fill_latent_noise": false,
|
||||
"img2img_inpainting_fill_latent_nothing": false,
|
||||
"img2img_inpaint_full_res_whole_picture": true,
|
||||
"img2img_inpaint_full_res_only_masked": false,
|
||||
"img2img_steps": "20"
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
{"img2img_init_img":null,"img2img_sketch":null,"img2img_init_img_with_mask":null,"img2img_inpaint_color_sketch":null,"img2img_init_img_inpaint":null,"img2img_init_mask_inpaint":[],"script_txt2txt_xyz_plot_x_values":"","script_txt2txt_xyz_plot_y_values":"","script_txt2txt_xyz_plot_z_values":"","script_txt2txt_prompt_matrix_different_seeds":false,"script_txt2txt_prompt_matrix_margin_size":"0","script_txt2txt_prompt_matrix_put_at_start":false,"script_txt2txt_checkbox_iterate_every_line":false,"script_txt2txt_checkbox_iterate_all_lines":false,"script_txt2txt_xyz_plot_draw_legend":true,"script_txt2txt_xyz_plot_include_lone_images":false,"script_txt2txt_xyz_plot_include_sub_grids":false,"script_txt2txt_xyz_plot_margin_size":"0","script_txt2txt_xyz_plot_no_fixed_seeds":false,"txt2img_batch_count":"1","txt2img_batch_size":"1","txt2img_cfg_scale":"7","txt2img_denoising_strength":"0.7","txt2img_enable_hr":false,"txt2img_height":"512","txt2img_hires_steps":"0","txt2img_hr_resize_x":"0","txt2img_hr_resize_y":"0","txt2img_hr_scale":"2","txt2img_restore_faces":false,"txt2img_seed":"-1","txt2img_seed_resize_from_h":"0","txt2img_seed_resize_from_w":"0","txt2img_steps":"20","txt2img_subseed":"-1","txt2img_subseed_show":false,"txt2img_subseed_strength":"0","txt2img_tiling":false,"txt2img_width":"512","script_list":"None","script_txt2txt_xyz_plot_x_type":"Seed","script_txt2txt_xyz_plot_x_value":"","script_txt2txt_xyz_plot_y_type":"Nothing","script_txt2txt_xyz_plot_y_value":"","script_txt2txt_xyz_plot_z_type":"Nothing","script_txt2txt_xyz_plot_z_value":"","txt2img_hr_upscaler":"Latent","txt2img_sampling_method":"Euler 
a","txt2img_sampling_steps":"20","sagemaker_endpoint":"","sagemaker_stable_diffusion_checkpoint":"","sagemaker_texual_inversion_model":"","sagemaker_lora_model":"","sagemaker_hypernetwork_model":"","sagemaker_controlnet_model":"","txt2img_controlnet_ControlNet_input_image":"","controlnet_enable":false,"controlnet_lowVRAM_enable":false,"controlnet_pixel_perfect":false,"controlnet_allow_preview":false,"controlnet_preprocessor":"","controlnet_model":"","controlnet_weight":"","controlnet_starting_control_step":"","controlnet_ending_control_step":"","controlnet_control_mode_balanced":false,"controlnet_control_mode_my_prompt_is_more_important":false,"controlnet_control_mode_controlnet_is_more_important":false,"controlnet_resize_mode_just_resize":false,"controlnet_resize_mode_Crop_and_Resize":false,"controlnet_resize_mode_Resize_and_Fill":false,"controlnet_loopback_automatically":false,"controlnet_preprocessor_resolution":"","controlnet_canny_low_threshold":"","controlnet_canny_high_threshold":"","script_txt2txt_prompt_matrix_prompt_type_positive":true,"script_txt2txt_prompt_matrix_prompt_type_negative":false,"script_txt2txt_prompt_matrix_variations_delimiter_comma":true,"script_txt2txt_prompt_matrix_variations_delimiter_space":false,"script_txt2txt_prompts_from_file_or_textbox_prompt_txt":"","txt2img_prompt":"","txt2img_neg_prompt":"","txt2img_styles":"","aws_api_gateway_url":"","aws_api_token":"","img2img_prompt":"","img2img_neg_prompt":"","img2img_resize_mode_just_resize":true,"img2img_resize_mode_crop_and_resize":false,"img2img_resize_mode_resize_and_fill":false,"img2img_resize_mode_just_resize_latent_upscale":false,"img2img_sampling_method":"Euler 
a","img2img_sampling_steps":"20","img2img_restore_faces":false,"img2img_tiling":false,"img2img_width":"512","img2img_height":"512","img2img_batch_count":"1","img2img_batch_size":"1","img2img_cfg_scale":"7","img2img_denoising_strength":"0.75","img2img_seed":"-1","img2img_subseed_show":false,"img2img_scale":"1","img2img_mask_blur":"4","img2img_mask_mode_inpaint_masked":true,"img2img_mask_mode_inpaint_not_masked":false,"img2img_inpainting_fill_fill":false,"img2img_inpainting_fill_original":true,"img2img_inpainting_fill_latent_noise":false,"img2img_inpainting_fill_latent_nothing":false,"img2img_inpaint_full_res_whole_picture":true,"img2img_inpaint_full_res_only_masked":false,"img2img_steps":"20"}
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
import dataclasses
|
||||
from typing import Optional, Any
|
||||
|
||||
|
||||
# a copy of aws_extensions.models.InvocationsRequest
|
||||
@dataclasses.dataclass
class InvocationsRequest:
    """Request payload for the SageMaker ``/invocations`` route.

    A local copy of ``aws_extensions.models.InvocationsRequest``; only the
    fields exercised by this test are kept active.
    """
    task: str  # inference task type, e.g. 'txt2img'
    username: Optional[str]  # requesting user name, or None
    param_s3: str  # S3 location of the serialized inference parameter JSON
    # checkpoint_info:Optional[dict]
    models: Optional[dict]  # presumably model-type -> checkpoint descriptors — TODO confirm against caller
    # Optional payload fields from the upstream class, unused in this test:
    # txt2img_payload: Optional[StableDiffusionTxt2ImgProcessingAPI]
    # img2img_payload: Optional[StableDiffusionImg2ImgProcessingAPI]
    # extras_single_payload: Optional[ExtrasSingleImageRequest]
    # extras_batch_payload: Optional[ExtrasBatchImagesRequest]
    # interrogate_payload: Optional[InterrogateRequest]
    # db_create_model_payload: Optional[str]
    # merge_checkpoint_payload: Optional[dict]
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
import json
|
||||
import requests
|
||||
import io
|
||||
import base64
|
||||
from PIL import Image, PngImagePlugin
|
||||
import time
|
||||
import os
|
||||
from gradio.processing_utils import encode_pil_to_base64
|
||||
import sys
|
||||
|
||||
sys.path.append("../../../middleware_api/inference")
|
||||
from parse.parameter_parser import json_convert_to_payload
|
||||
start_time = time.time()
|
||||
|
||||
url = "http://127.0.0.1:8083"
|
||||
# url = "http://0.0.0.0:8083"
|
||||
|
||||
aigc_json_file = "../json_files/txt2img_test.json"
|
||||
f = open(aigc_json_file)
|
||||
aigc_params = json.load(f)
|
||||
checkpoint_info = {'Stable-diffusion': {'v1-5-pruned-emaonly.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/icon/062b8574-8380-49d3-a8c4-7d5cf8100bd8/v1-5-pruned-emaonly.safetensors', 'darkSushiMixMix_225D.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/b163f4da-2219-4d8e-9cea-6af34662a11b/darkSushiMixMix_225D.safetensors', 'dreamshaper_7.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/e0fb5452-ecdf-41bf-8c45-1b63383ae6bc/dreamshaper_7.safetensors', 'v2-1_768-ema-pruned.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/3e5f67ca-4f35-40fb-a513-d81f3aea75fe/v2-1_768-ema-pruned.safetensors', 'sd-v1-5-inpainting.ckpt': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/c05338af-98a2-424d-b8cb-33e808a2b007/sd-v1-5-inpainting.ckpt', 'sd_xl_refiner_1.0.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/18d6b6fa-e8c1-4d66-b14f-c4786271a7ba/sd_xl_refiner_1.0.safetensors', 'sd_xl_base_0.9.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/39b16a9a-2c72-4370-9075-d62606bef914/sd_xl_base_0.9.safetensors', 'sd_xl_base_1.0.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Stable-diffusion/checkpoint/custom/7b24f656-c14c-47a7-9493-5b80652e44ec/sd_xl_base_1.0.safetensors'}, 'embeddings': {'corneo_marin_kitagawa.pt': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/embeddings/checkpoint/custom/f2477fd1-dcb1-4184-ae40-7aca77454b57/corneo_marin_kitagawa.pt'}, 'Lora': {'hanfu_v30Song.safetensors': 
's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/Lora/checkpoint/custom/86bac5b3-e30b-4de7-b33a-608ae3d7ced2/hanfu_v30Song.safetensors'}, 'hypernetworks': {'LuisapKawaii_v1.pt': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/hypernetworks/checkpoint/custom/12716ec7-6846-4c61-96d8-b6cdfb2bfbaf/LuisapKawaii_v1.pt'}, 'ControlNet': {'control_v11p_sd15_inpaint.pth': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-1dga2v0104mc2/ControlNet/checkpoint/custom/3e48115f-fb0c-4966-828d-384f80ea397c/control_v11p_sd15_inpaint.pth'}, 'sagemaker_endpoint': 'infer-endpoint-1574a8b', 'task_type': 'txt2img'}
|
||||
|
||||
task_type = 'txt2img'
|
||||
print(f"Task Type: {task_type}")
|
||||
payload = json_convert_to_payload(aigc_params, checkpoint_info, task_type)
|
||||
|
||||
model_list = []
|
||||
model_list.append("v1-5-pruned-emaonly.safetensors")
|
||||
# model_list.append("darkSushiMixMix_225D.safetensors")
|
||||
# model_list.append("sd-v1-5-inpainting.ckpt")
|
||||
# model_list.append("dreamshaper_7.safetensors")
|
||||
# model_list.append("v2-1_768-ema-pruned.safetensors")
|
||||
# model_list.append("v1-5-pruned-emaonly.safetensors")
|
||||
# model_list.append("darkSushiMixMix_225D.safetensors")
|
||||
# model_list.append("sd-v1-5-inpainting.ckpt")
|
||||
# model_list.append("dreamshaper_7.safetensors")
|
||||
# model_list.append("v2-1_768-ema-pruned.safetensors")
|
||||
# model_list.append("v1-5-pruned-emaonly.safetensors")
|
||||
# model_list.append("darkSushiMixMix_225D.safetensors")
|
||||
# model_list.append("sd-v1-5-inpainting.ckpt")
|
||||
# model_list.append("dreamshaper_7.safetensors")
|
||||
# model_list.append("v2-1_768-ema-pruned.safetensors")
|
||||
|
||||
import psutil
|
||||
# import gc
|
||||
|
||||
payload = {
|
||||
"prompt": "maltese puppy",
|
||||
"steps": 5
|
||||
}
|
||||
|
||||
for model in model_list:
|
||||
# payload["models"]["Stable-diffusion"]= [model]
|
||||
# response = requests.post(url=f'{url}/invocations', json=payload)
|
||||
response = requests.post(url=f'http://0.0.0.0:8083/sdapi/v1/txt2img', json=payload)
|
||||
|
||||
print(f'Model {model} RAM memory {psutil.virtual_memory()[2]} used: {psutil.virtual_memory()[3]/1000000000 } (GB)')
|
||||
|
||||
# gc.collect()
|
||||
|
||||
# r = response.json()
|
||||
# id = 0
|
||||
# for i in r['images']:
|
||||
# image = Image.open(io.BytesIO(base64.b64decode(i.split(",",1)[0])))
|
||||
|
||||
# png_payload = {
|
||||
# "image": "data:image/png;base64," + i
|
||||
# }
|
||||
# response2 = requests.post(url=f'{url}/sdapi/v1/png-info', json=png_payload)
|
||||
|
||||
# pnginfo = PngImagePlugin.PngInfo()
|
||||
# pnginfo.add_text("parameters", response2.json().get("info"))
|
||||
# image.save('output_%d.png'%id, pnginfo=pnginfo)
|
||||
# id += 1
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
import requests
|
||||
import time
|
||||
start_time = time.time()
|
||||
|
||||
url = "http://127.0.0.1:8080"
|
||||
|
||||
task_type = 'txt2img'
|
||||
print(f"Task Type: {task_type}")
|
||||
from _types import InvocationsRequest
|
||||
|
||||
def custom_serializer(obj):
    """JSON serializer hook (``default=``) for InvocationsRequest instances.

    Args:
        obj: Object that ``json.dumps`` could not serialize natively.

    Returns:
        (dict): The object's attribute dictionary.

    Raises:
        TypeError: If *obj* is not an InvocationsRequest.
    """
    if isinstance(obj, InvocationsRequest):
        return obj.__dict__  # convert the dataclass instance to a plain dict
    raise TypeError("Object not serializable")
|
||||
|
||||
request = InvocationsRequest(task='txt2img', username='test',param_s3='mytestbuckets3/api_param.json',models={'space_free_size': 40000000000.0, 'Stable-diffusion': [{'s3': 's3testurlbucket/Stable-diffusion/checkpoint/custom/13fb3cd2-3d7c-41dd-8fcb-78cb8a86297e','id': '13fb3cd2-3d7c-41dd-8fcb-78cb8a86297e','model_name': 'abyssorangemix3AOM3_aom3.safetensors', 'type': 'Stable-diffusion'}]})
|
||||
|
||||
model_list = []
|
||||
|
||||
model_list.append("yangk-style_2160_lora.safetensors")
|
||||
|
||||
import psutil
|
||||
# import gc
|
||||
import time
|
||||
|
||||
for model in model_list:
|
||||
start_time = time.time()
|
||||
|
||||
# payload["models"]["Stable-diffusion"]= [model]
|
||||
response = requests.post(url=f'{url}/invocations', json=request.__dict__)
|
||||
|
||||
print(f'Model {model} Running Time {time.time()-start_time} s RAM memory {psutil.virtual_memory()[2]} used: {psutil.virtual_memory()[3]/1000000000 } (GB)')
|
||||
print(response.json())
|
||||
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
import json
|
||||
import requests
|
||||
import io
|
||||
import base64
|
||||
from PIL import Image, PngImagePlugin
|
||||
import time
|
||||
import os
|
||||
from gradio.processing_utils import encode_pil_to_base64
|
||||
import sys
|
||||
|
||||
sys.path.append("../../../middleware_api/inference")
|
||||
from parse.parameter_parser import json_convert_to_payload
|
||||
start_time = time.time()
|
||||
|
||||
url = "http://127.0.0.1:8080"
|
||||
|
||||
aigc_json_file = "../json_files/txt2img_test.json"
|
||||
f = open(aigc_json_file)
|
||||
aigc_params = json.load(f)
|
||||
checkpoint_info = {'Stable-diffusion': {'v2-1_768-ema-pruned.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/13896019-1ba4-478a-a5ec-b7e143e840ca/v2-1_768-ema-pruned.safetensors', 'meinamix_meinaV10.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/491803b4-8293-4604-b879-7b1d3fa8f1df/meinamix_meinaV10.safetensors', 'cheeseDaddys_41.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/6fc2a447-a2d6-427c-b520-fef0f4c5ce85/cheeseDaddys_41.safetensors', 'AnythingV5Ink_ink.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/1a0227fc-5bb0-436b-aa87-80d487a536b3/AnythingV5Ink_ink.safetensors', 'camelliamix25DV2_v2.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/2f5063ee-e2ac-40be-b48e-8762dfdc25eb/camelliamix25DV2_v2.safetensors', 'sd-v1-5-inpainting.ckpt': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/822a6754-87e7-495b-b71a-543cf78cefb2/sd-v1-5-inpainting.ckpt', 'yangk-style_2160_lora.safetensors': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/Stable-diffusion/checkpoint/custom/7a8ad4b0-0159-4c0d-a5b9-a6692f90902a/yangk-style_2160_lora.safetensors'}, 'embeddings': {}, 'Lora': {}, 'hypernetworks': {}, 'ControlNet': {'control_v11p_sd15_canny.pth': 's3://stable-diffusion-aws-extension-aigcbucketa457cb49-xfyck6nj4vlo/ControlNet/checkpoint/custom/a20edd04-535c-4d85-842b-95c3c743d819/control_v11p_sd15_canny.pth'}, 'sagemaker_endpoint': 'infer-endpoint-5d9775d', 'task_type': 'txt2img'}
|
||||
|
||||
task_type = 'txt2img'
|
||||
print(f"Task Type: {task_type}")
|
||||
payload = json_convert_to_payload(aigc_params, checkpoint_info, task_type)
|
||||
|
||||
model_list = []
|
||||
model_list.append("yk-mk-exp-1/yk-mk-exp-1_1200_lora.safetensors")
|
||||
model_list.append("piying-base-15-model/piying-base-15-model_900_lora.safetensors")
|
||||
model_list.append("PyAstronaut3/PyAstronaut3_900_lora.safetensors")
|
||||
model_list.append("stable-diffusion-inpainting/sd-v1-5-inpainting.ckpt")
|
||||
model_list.append("yangke-monkey/yangke-monkey_800_lora.safetensors")
|
||||
model_list.append("piying_xuanran_2703_lora.safetensors")
|
||||
model_list.append("yinke-style_1380_lora.safetensors")
|
||||
model_list.append("yangk-style_2160_lora.safetensors")
|
||||
model_list.append("v2-1_768-ema-pruned.safetensors")
|
||||
model_list.append("v1-5-pruned-emaonly.safetensors")
|
||||
|
||||
import psutil
|
||||
# import gc
|
||||
import time
|
||||
|
||||
for model in model_list:
|
||||
start_time = time.time()
|
||||
|
||||
payload["models"]["Stable-diffusion"]= [model]
|
||||
response = requests.post(url=f'{url}/invocations', json=payload)
|
||||
|
||||
print(f'Model {model} Running Time {time.time()-start_time} s RAM memory {psutil.virtual_memory()[2]} used: {psutil.virtual_memory()[3]/1000000000 } (GB)')
|
||||
|
||||
# gc.collect()
|
||||
|
||||
# r = response.json()
|
||||
# id = 0
|
||||
# for i in r['images']:
|
||||
# image = Image.open(io.BytesIO(base64.b64decode(i.split(",",1)[0])))
|
||||
|
||||
# png_payload = {
|
||||
# "image": "data:image/png;base64," + i
|
||||
# }
|
||||
# response2 = requests.post(url=f'{url}/sdapi/v1/png-info', json=png_payload)
|
||||
|
||||
# pnginfo = PngImagePlugin.PngInfo()
|
||||
# pnginfo.add_text("parameters", response2.json().get("info"))
|
||||
# image.save('output_%d.png'%id, pnginfo=pnginfo)
|
||||
# id += 1
|
||||
|
|
@ -16,7 +16,7 @@ phases:
|
|||
build:
|
||||
commands:
|
||||
- git clone https://github.com/awslabs/stable-diffusion-aws-extension --branch $CODE_BRANCH --single-branch
|
||||
- bash stable-diffusion-aws-extension/test/api/buildspec_build.sh
|
||||
- bash stable-diffusion-aws-extension/test/buildspec_build.sh
|
||||
post_build:
|
||||
commands:
|
||||
- bash stable-diffusion-aws-extension/test/api/buildspec_post_build.sh
|
||||
- bash stable-diffusion-aws-extension/test/buildspec_post_build.sh
|
||||
|
|
@ -56,7 +56,7 @@ echo "export API_GATEWAY_URL_TOKEN=$API_GATEWAY_URL_TOKEN" >> env.properties
|
|||
python --version
|
||||
sudo yum install wget -y
|
||||
|
||||
cd stable-diffusion-aws-extension/test/api
|
||||
cd stable-diffusion-aws-extension/test
|
||||
make build
|
||||
|
||||
echo "----------------------------------------------------------------"
|
||||
|
|
@ -8,7 +8,7 @@ if [ -z "$ACCOUNT_ID" ]; then
|
|||
exit 1
|
||||
fi
|
||||
|
||||
cd stable-diffusion-aws-extension/test/api || exit
|
||||
cd stable-diffusion-aws-extension/test || exit
|
||||
|
||||
ls -la
|
||||
|
||||
|
Before Width: | Height: | Size: 233 KiB After Width: | Height: | Size: 233 KiB |
|
Before Width: | Height: | Size: 369 KiB After Width: | Height: | Size: 369 KiB |
|
Before Width: | Height: | Size: 99 KiB After Width: | Height: | Size: 99 KiB |
|
Before Width: | Height: | Size: 291 KiB After Width: | Height: | Size: 291 KiB |
|
Before Width: | Height: | Size: 423 KiB After Width: | Height: | Size: 423 KiB |
|
Before Width: | Height: | Size: 496 KiB After Width: | Height: | Size: 496 KiB |
|
Before Width: | Height: | Size: 405 KiB After Width: | Height: | Size: 405 KiB |
|
Before Width: | Height: | Size: 496 KiB After Width: | Height: | Size: 496 KiB |
|
Before Width: | Height: | Size: 1.4 MiB After Width: | Height: | Size: 1.4 MiB |
|
Before Width: | Height: | Size: 1.5 MiB After Width: | Height: | Size: 1.5 MiB |
|
Before Width: | Height: | Size: 1.3 MiB After Width: | Height: | Size: 1.3 MiB |
|
|
@ -0,0 +1,22 @@
|
|||
import logging
|
||||
|
||||
import boto3
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
client = boto3.client('apigateway')
|
||||
|
||||
template = "https://aws-gcr-solutions-us-east-1.s3.amazonaws.com/extension-for-stable-diffusion-on-aws/ec2.yaml"
|
||||
|
||||
|
||||
class TestWebUiClient:
|
||||
|
||||
@classmethod
|
||||
def setup_class(self):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def teardown_class(self):
|
||||
pass
|
||||
|
||||
def test_1_create_webui_by_template(self):
|
||||
print(template)
|
||||
|
|
@ -0,0 +1 @@
|
|||
|
||||