Skip to content

Load Test scripts added #52

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions loadtestutils/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Stage 1 (base): install Python deps that need a C toolchain on Alpine,
# into the user site (/root/.local) so they can be copied out cleanly.
FROM python:3.8-alpine AS base

#https://stackoverflow.com/questions/66118337/how-to-get-rid-of-cryptography-build-error

# Build-time packages needed to compile packages from source on Alpine
# (no manylinux wheels); installed as a named virtual group so they can
# be removed in one step below.
RUN apk add --no-cache --virtual .build-deps gcc musl-dev linux-headers python3-dev libffi-dev cargo openssl-dev

# NOTE: always bump the pinned version when upgrading, otherwise Docker
# layer caching may reuse the stale install.
RUN pip install --user azure-storage-blob==12.5.0

RUN apk del .build-deps

# Stage 2 (runtime): start from a clean base and copy only the installed
# user-site packages — the build toolchain never reaches the final image.
FROM python:3.8-alpine AS build-image
COPY --from=base /root/.local /root/.local

# Make sure scripts in .local are usable:
ENV PATH=/root/.local/bin:$PATH

COPY storageaccountwriter/storageaccountwriter.py .
# command to run on container start
CMD [ "python", "storageaccountwriter.py"]
22 changes: 22 additions & 0 deletions loadtestutils/createimage.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
# Build the storageaccountwriter image and push it to ECR.

# Fail fast: abort on any error, unset variable, or failed pipe stage,
# so a failed build never silently proceeds to tag/push.
set -euo pipefail

export AWS_PROFILE=prod
version="1.0.0"
image_name="storageaccountwriter"
account_id="956882708938"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we replace the account_id with a dummy value here?

region="us-east-2"
image_tag="$image_name:$version"

# ECR registry hostname, built once instead of repeated inline.
ecr_registry="$account_id.dkr.ecr.$region.amazonaws.com"

# Build and list the image locally. Expansions are quoted to prevent
# word splitting/globbing (ShellCheck SC2086).
docker build --tag "$image_tag" -f "Dockerfile" .

docker image ls "$image_name"


docker tag "$image_tag" "$ecr_registry/$image_tag"

# Authenticate the docker CLI against ECR, then push.
aws ecr get-login-password --region "$region" | docker login --username AWS --password-stdin "$ecr_registry"

docker push "$ecr_registry/$image_tag"


107 changes: 107 additions & 0 deletions loadtestutils/experiment1/starttest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
# generates ecs tasks for experiment 1
import boto3

# Module-level AWS clients, both pinned to us-east-2: ECS for registering
# and launching Fargate tasks, CloudWatch Logs for creating the log group
# the task definitions write to.
ecs_client = boto3.client('ecs', region_name="us-east-2")
cw_client = boto3.client('logs', region_name="us-east-2")


def create_task(experiment_number, numberOfFiles, fileSize, storageAccount, accessKey):
    """Register an ECS Fargate task definition for one storage-account writer.

    The task runs the storageaccountwriter image, configured via environment
    variables (account name/key, target container, max file size), and ships
    its logs to a CloudWatch group named after the task family.

    Returns the raw register_task_definition response (also printed).
    """
    family = 'StorageAccountWriter%s_%d_%d' % (storageAccount, experiment_number, numberOfFiles)
    writer_env = [
        {'name': "AccountName", "value": storageAccount},
        {'name': "AccessKey", "value": accessKey},
        {'name': "BlobName", "value": "blob_from_image.json"},
        {'name': "ContainerName", "value": "appendblobexp%d-%d-%d" % (experiment_number, numberOfFiles, fileSize)},
        {'name': "MaxLogFileSize", "value": str(fileSize)},
    ]
    container_definition = {
        'name': 'storageaccountwriter',
        'image': '956882708938.dkr.ecr.us-east-2.amazonaws.com/storageaccountwriter:1.0.0',
        'memoryReservation': 128,
        'portMappings': [],
        'essential': True,
        'environment': writer_env,
        'logConfiguration': {
            "logDriver": "awslogs",
            "options": {
                # Log group name mirrors the task family.
                "awslogs-group": "/ecs/" + family,
                "awslogs-region": "us-east-2",
                "awslogs-stream-prefix": "ecs",
            },
        },
    }
    response = ecs_client.register_task_definition(
        family=family,
        executionRoleArn='arn:aws:iam::956882708938:role/ecsTaskExecutionRole',
        networkMode='awsvpc',
        containerDefinitions=[container_definition],
        inferenceAccelerators=[],
        volumes=[],
        requiresCompatibilities=['FARGATE'],
        cpu='256',
        memory='512',
    )
    print(response)
    return response

def run_task(task_definition, blob_name):
    """Launch one Fargate task from *task_definition*, overriding BlobName.

    The container's BlobName environment variable is replaced with
    *blob_name* so each task writes to its own blob. The run_task response
    is printed; nothing is returned.
    """
    vpc_config = {
        'subnets': ["subnet-3f03f656", "subnet-b88ba4f2", "subnet-04a9a67c"],
        'securityGroups': ["sg-0ae3d00960d684538"],
        'assignPublicIp': 'ENABLED',
    }
    blob_override = {
        'name': 'storageaccountwriter',
        'environment': [{'name': 'BlobName', 'value': blob_name}],
    }
    response = ecs_client.run_task(
        cluster='arn:aws:ecs:us-east-2:956882708938:cluster/AzurePerfTesting',
        count=1,
        enableECSManagedTags=True,
        launchType='FARGATE',
        networkConfiguration={'awsvpcConfiguration': vpc_config},
        overrides={'containerOverrides': [blob_override]},
        taskDefinition=task_definition,
    )
    print(response)

def create_cluster():
    """Placeholder — not implemented; run_task uses a hard-coded cluster ARN."""
    pass


def createStorageAccount():
    """Placeholder — not implemented; startExperiment1 hard-codes the account name."""
    pass


def startExperiment1():
    """Drive experiment 1: register one task definition, then launch one
    Fargate task per blob (numberOfFiles tasks total), each writing its own
    append blob of up to fileSize bytes into the configured storage account.
    """
    storageAccount = "allbloblogseastus"
    storageAccountAccessKey = "<storage account accessKey>"
    numberOfFiles = 5000
    fileSize = 100*1024*1024
    experiment_number = 4

    # Shared name for the task family and its CloudWatch log group.
    family = 'StorageAccountWriter%s_%d_%d' % (storageAccount, experiment_number, numberOfFiles)

    # Best-effort: the group may already exist from a previous run.
    try:
        cw_client.create_log_group(logGroupName="/ecs/" + family)
    except Exception as e:
        print(e)

    response = create_task(experiment_number, numberOfFiles, fileSize, storageAccount, storageAccountAccessKey)
    # The revision number is the trailing component of the task definition ARN.
    revision_number = response.get("taskDefinition", {}).get("taskDefinitionArn").rsplit(":", 1)[-1]
    task_definition = family + ":" + str(revision_number)
    print("Using revision", revision_number, task_definition)

    for i in range(1, numberOfFiles+1):
        blob_name = 'appendblobfile%s_%d_%d_%d_%d.json' % (storageAccount, experiment_number, numberOfFiles, fileSize, i)
        run_task(task_definition, blob_name)

if __name__ == '__main__':
    # Kick off experiment 1 when run as a script.
    startExperiment1()
1 change: 1 addition & 0 deletions loadtestutils/storageaccountwriter/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
azure-storage-blob==12.5.0
155 changes: 155 additions & 0 deletions loadtestutils/storageaccountwriter/storageaccountwriter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
import os
import uuid
from datetime import datetime
from azure.core.exceptions import ResourceExistsError
from azure.storage.blob import BlobServiceClient,BlobBlock # BlockBlobService
# from azure.storage.blob.models import BlobBlock


def getLastLogLineNumber(blob_client, current_file_size):
    """Return the LineNo of the last log line already present in the blob.

    Downloads only the blob's tail (512 bytes starting 500 bytes before the
    end) and parses the final ``"LineNo": N`` field of the JSON log lines.
    Returns 0 when the blob is empty (current_file_size <= 0).
    """
    if current_file_size <= 0:
        return 0
    start = current_file_size - 500
    if start < 0:
        start = 0
    tail = blob_client.download_blob(offset=start, length=512).content_as_text()
    # Everything after the last LineNo field; rpartition yields the whole
    # string when the marker is absent, same as rsplit(...)[-1] would.
    _, _, after = tail.rpartition("LineNo\":")
    return int(after.rstrip('}\n').strip())


def utf8len(s):
    """Return the size of *s* in bytes when UTF-8 encoded.

    Accepts either ``str`` (encoded first) or ``bytes`` (measured as-is).
    """
    encoded = s if isinstance(s, bytes) else s.encode('utf-8')
    return len(encoded)

def get_current_blocks(block_blob_service, container_name, filename):
    """Return the committed blocks of an existing block blob as BlobBlock list.

    The caller in this file passes an ``azure.storage.blob.BlobClient``
    (SDK v12) as *block_blob_service*; the original body used the legacy
    pre-v12 API (``exists(container, name)``, positional ``get_block_list``,
    ``BlobBlock(id=...)``) which does not exist on a v12 client and would
    raise TypeError at runtime. *container_name* and *filename* are kept for
    backward compatibility but are implied by the client itself.

    Returns a list suitable for ``commit_block_list``; empty when the blob
    does not exist yet.
    """
    blocks = []
    if block_blob_service.exists():
        # v12 get_block_list returns (committed_blocks, uncommitted_blocks).
        committed, _uncommitted = block_blob_service.get_block_list(block_list_type='committed')
        for block in committed:
            blocks.append(BlobBlock(block_id=block.id))
    return blocks

def get_random_name(length=32):
    """Return a random hex identifier of *length* characters (at most 32).

    Used for block ids when staging blocks. The previous implementation
    ignored *length* and always returned a 36-character dashed UUID; the
    parameter is now honored by slicing the 32-char UUID hex form.
    """
    return uuid.uuid4().hex[:length]

def create_or_update_blockblob(block_blob_service_client, current_file_size, log_line_num, container_name, blob_name, account_name, blocks):
    """Grow a block blob with synthetic log lines until it reaches MaxLogFileSize.

    Stages ~4 MiB chunks of fake AppService log lines (8192 lines of ~512
    bytes), committing the cumulative block list after every chunk so the
    blob stays readable throughout. *current_file_size*, *log_line_num* and
    *blocks* let a run resume an existing blob. MaxLogFileSize (bytes) is
    read from the environment.
    """
    max_file_size = int(os.getenv("MaxLogFileSize"))
    logline = '''{ "time": "TIMESTAMP", "resourceId": "/SUBSCRIPTIONS/C088DC46-D692-42AD-A4B6-9A542D28AD2A/RESOURCEGROUPS/SUMOAUDITCOLLECTION/PROVIDERS/MICROSOFT.WEB/SITES/HIMTEST", "operationName": "Microsoft.Web/sites/log", "category": "AppServiceConsoleLogs", "resultDescription": "000000000 WARNING:root:testing warn level\\n\\n", "level": "Error", "EventStampType": "Stamp", "EventPrimaryStampName": "waws-prod-blu-161", "EventStampName": "waws-prod-blu-161h", "Host": "RD501AC57BA3D4", "LineNo": LINENUM}'''

    while current_file_size < max_file_size:
        # since (4*1024*1024)/512(size of logline) = 8192
        block_id = get_random_name()
        lines = []
        for _ in range(8192):
            log_line_num += 1
            stamped = logline.replace("TIMESTAMP", datetime.now().isoformat())
            lines.append(stamped.replace("LINENUM", f'{log_line_num:10d}'))
        chunk = "\n".join(lines) + "\n"
        block_blob_service_client.stage_block(block_id=block_id, data=chunk.encode())
        cur_size = utf8len(chunk)
        current_file_size += cur_size
        blocks.append(BlobBlock(block_id=block_id))
        # Commit after every chunk so progress is durable/visible.
        block_blob_service_client.commit_block_list(blocks)
        print(f"current_chunk_size (in MB): {cur_size/(1024*1024)} log_line_num: {log_line_num} current_file_size: {current_file_size/(1024*1024)} storage: {account_name} container: {container_name} blob: {blob_name} ")
    print("inserted %s" % len(blocks))

def upload_file_chunks_using_block_blobs():
    """Entry point: fill a block blob with synthetic logs, env-driven.

    Reads AccountName, AccessKey, BlobName and ContainerName from the
    environment, creates the container if needed, and either starts a new
    blob from scratch or resumes an existing one from its current size,
    committed blocks and last line number.
    """
    account_name = os.getenv("AccountName")
    account_access_key = os.getenv("AccessKey")
    blob_name = os.getenv("BlobName")
    container_name = os.getenv("ContainerName")

    service = BlobServiceClient(
        account_url="https://%s.blob.core.windows.net" % account_name,
        credential=account_access_key)
    container_client = service.get_container_client(container_name)
    try:
        container_client.create_container()
    except ResourceExistsError:
        print("Container Already Exists")

    blob_client = container_client.get_blob_client(blob_name)
    if blob_client.exists():
        # Resume: pick up existing blocks, size and last line number.
        existing_blocks = get_current_blocks(blob_client, container_name, blob_name)
        size = blob_client.get_blob_properties().size
        last_line = getLastLogLineNumber(blob_client, size)
        create_or_update_blockblob(blob_client, size, last_line, container_name, blob_name, account_name, existing_blocks)
    else:
        create_or_update_blockblob(blob_client, 0, 0, container_name, blob_name, account_name, [])


def upload_file_chunks_using_append_blobs():

    '''
    Fill an append blob with synthetic AppService log lines until it reaches
    MaxLogFileSize, resuming from the current size and last LineNo when the
    blob already exists.

    Configuration comes from environment variables: AccountName, AccessKey,
    BlobName, ContainerName, MaxLogFileSize (bytes).

    azure-storage-blob==12.5.0
    https://docs.microsoft.com/en-us/python/api/overview/azure/storage-blob-readme?view=azure-python
    '''
    # blob_path = "resourceId=/SUBSCRIPTIONS/C088DC46-D692-42AD-A4B6-9A542D28AD2A/RESOURCEGROUPS/SUMOAUDITCOLLECTION/PROVIDERS/MICROSOFT.WEB/SITES/HIMTEST/y=2020/m=11/d=02/h=06/m=00/"

    account_name = os.getenv("AccountName")
    account_access_key = os.getenv("AccessKey")
    blob_name = os.getenv("BlobName")
    container_name = os.getenv("ContainerName")
    max_file_size = int(os.getenv("MaxLogFileSize"))
    blob_service_client = BlobServiceClient(account_url="https://%s.blob.core.windows.net" % account_name, credential=account_access_key)

    container_client = blob_service_client.get_container_client(container_name)
    blob_client = None
    # Container creation is best-effort; an existing container is fine.
    try:
        container_client.create_container()
    except ResourceExistsError:
        print("Container Already Exists")

    blob_client = container_client.get_blob_client(blob_name)
    if not blob_client.exists():
        blob_client.create_append_blob()
        current_file_size = 0
        print(f"Creating new file storage: {account_name} container: {container_name} blob: {blob_name} ")
        log_line_num = 0
    else:
        # Resume an interrupted run: continue numbering from the last line.
        current_file_size = blob_client.get_blob_properties().size
        log_line_num = getLastLogLineNumber(blob_client, current_file_size)

    print(f"current_file_size (in MB): {current_file_size/(1024*1024)} log_line_num: {log_line_num} storage: {account_name} container: {container_name} blob: {blob_name} ")
    # Template line; TIMESTAMP and LINENUM placeholders are replaced per line.
    logline = '''{ "time": "TIMESTAMP", "resourceId": "/SUBSCRIPTIONS/C088DC46-D692-42AD-A4B6-9A542D28AD2A/RESOURCEGROUPS/SUMOAUDITCOLLECTION/PROVIDERS/MICROSOFT.WEB/SITES/HIMTEST", "operationName": "Microsoft.Web/sites/log", "category": "AppServiceConsoleLogs", "resultDescription": "000000000 WARNING:root:testing warn level\\n\\n", "level": "Error", "EventStampType": "Stamp", "EventPrimaryStampName": "waws-prod-blu-161", "EventStampName": "waws-prod-blu-161h", "Host": "RD501AC57BA3D4", "LineNo": LINENUM}'''

    # NOTE(review): each chunk is 8192 lines of ~512 bytes, i.e. right around
    # the 4 MiB append-block maximum — confirm a chunk with real ISO
    # timestamps (longer than the "TIMESTAMP" placeholder) cannot exceed it.
    while current_file_size < max_file_size:
        # since (4*1024*1024)/512(size of logline) = 8192
        msg = []
        for idx in range(8192):
            log_line_num += 1
            current_datetime = datetime.now().isoformat()
            cur_msg = logline.replace("TIMESTAMP", current_datetime)
            cur_msg = cur_msg.replace("LINENUM", f'{log_line_num:10d}')
            msg.append(cur_msg)

        chunk = "\n".join(msg) + "\n"
        cur_size = utf8len(chunk)
        current_file_size += cur_size
        print(f"current_chunk_size (in MB): {cur_size/(1024*1024)} log_line_num: {log_line_num} current_file_size: {current_file_size/(1024*1024)} storage: {account_name} container: {container_name} blob: {blob_name} ")
        blob_client.append_block(chunk)
        # time.sleep(20)

    print(f"Finished uploading current_file_size (in MB): {current_file_size/(1024*1024)} last_log_line_num: {log_line_num} storage: {account_name} container: {container_name} blob: {blob_name} ")


if __name__ == '__main__':
    # for testing locally uncomment below code
    # NOTE(review): os.environ is a mapping, not callable — set values with
    # os.environ.update({...}) rather than os.environ({...}) as sketched.
    # os.environ({
    # "AccountName": "allbloblogseastus",
    # "AccessKey": "<storage account access key>",
    # "BlobName": "blob_1_with_newline.json",
    # "ContainerName": "testappendblob",
    # "MaxLogFileSize": str(1*1024*1024)}
    # )
    upload_file_chunks_using_append_blobs()
    # upload_file_chunks_using_block_blobs()

7 changes: 7 additions & 0 deletions loadtestutils/testimage.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Run the storageaccountwriter image locally, with its configuration
# (AccountName, AccessKey, BlobName, ContainerName, MaxLogFileSize)
# supplied via test.env.

version="1.0.0"

# NOTE: --env-file must come before the image name — docker treats
# arguments after the image as the container command and errors out.
docker run --env-file test.env -it "storageaccountwriter:$version"