Skip to content

Commit

Permalink
Fix airlock tests
Browse files Browse the repository at this point in the history
  • Loading branch information
marrobi committed Nov 13, 2023
1 parent 5c069a7 commit 926f00e
Show file tree
Hide file tree
Showing 10 changed files with 52 additions and 42 deletions.
2 changes: 1 addition & 1 deletion airlock_processor/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.7.2"
__version__ = "0.7.3"
2 changes: 1 addition & 1 deletion airlock_processor/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Do not include azure-functions-worker as it may conflict with the Azure Functions platform
azure-core==1.29.5
azure-functions==1.17.0
azure-storage-blob==12.15.0
azure-storage-blob==12.19.0
azure-identity==1.14.1
azure-mgmt-storage==21.1.0
azure-mgmt-resource==23.0.1
Expand Down
19 changes: 11 additions & 8 deletions airlock_processor/shared_code/blob_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,16 +69,19 @@ def copy_data(source_account_name: str, destination_account_name: str, request_i
logging.error(msg)
raise NoFilesInRequestException(msg)

udk = source_blob_service_client.get_user_delegation_key(datetime.datetime.utcnow() - datetime.timedelta(hours=1),
datetime.datetime.utcnow() + datetime.timedelta(hours=1))

# token generation with expiry of 1 hour. since it's not shared, we can leave it to expire (no need to track/delete)
# Remove sas token if not needed: https://github.com/microsoft/AzureTRE/issues/2034
sas_token = generate_container_sas(account_name=source_account_name,
container_name=container_name,
user_delegation_key=udk,
permission=ContainerSasPermissions(read=True),
expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1))
start = datetime.utcnow() - timedelta(minutes=15)
expiry = datetime.utcnow() + timedelta(hours=1)
udk = blob_service_client.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry)

token = generate_container_sas(container_name=airlock_request.id,
account_name=account_name,
user_delegation_key=udk,
permission=required_permission,
start=start,
expiry=expiry)


source_blob = source_container_client.get_blob_client(blob_name)
source_url = f'{source_blob.url}?{sas_token}'
Expand Down
2 changes: 1 addition & 1 deletion api_app/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.16.6"
__version__ = "0.16.7"
2 changes: 1 addition & 1 deletion api_app/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ azure-identity==1.14.1
azure-mgmt-cosmosdb==9.3.0
azure-mgmt-compute==30.3.0
azure-mgmt-costmanagement==4.0.1
azure-storage-blob==12.15.0
azure-storage-blob==12.19.0
azure-servicebus==7.11.3
azure-eventgrid==4.15.0
fastapi==0.104.0
Expand Down
11 changes: 9 additions & 2 deletions api_app/services/airlock.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,14 +107,22 @@ def get_airlock_request_container_sas_token(account_name: str,
airlock_request: AirlockRequest):
blob_service_client = BlobServiceClient(account_url=get_account_url(account_name),
credential=credentials.get_credential())

start = datetime.utcnow() - timedelta(minutes=15)
expiry = datetime.utcnow() + timedelta(hours=config.AIRLOCK_SAS_TOKEN_EXPIRY_PERIOD_IN_HOURS)
udk = blob_service_client.get_user_delegation_key(datetime.utcnow(), expiry)

try:
udk = blob_service_client.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry)
except Exception:
raise Exception("Failed getting user delegation key, has the API identity been granted 'Storage Blob Data Contributor' access to the storage account {account_name}?")

required_permission = get_required_permission(airlock_request)

token = generate_container_sas(container_name=airlock_request.id,
account_name=account_name,
user_delegation_key=udk,
permission=required_permission,
start=start,
expiry=expiry)

return "https://{}.blob.{}/{}?{}" \
Expand Down Expand Up @@ -267,7 +275,6 @@ async def save_and_publish_event_airlock_request(airlock_request: AirlockRequest
# First check we have some email addresses so we can notify people.
access_service = get_access_service()
role_assignment_details = access_service.get_workspace_role_assignment_details(workspace)
check_email_exists(role_assignment_details)

try:
logging.debug(f"Saving airlock request item: {airlock_request.id}")
Expand Down
11 changes: 7 additions & 4 deletions api_app/services/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
]

LOGGERS_FOR_ERRORS_ONLY = [
"urllib3.connectionpool",
"uamqp",
"uamqp.authentication.cbs_auth_async",
"uamqp.async_ops.client_async",
Expand All @@ -38,25 +39,27 @@
"uamqp.async_ops.session_async",
"uamqp.sender",
"uamqp.client",
"azure.identity._persistent_cache",
"azure.servicebus.aio._base_handler_async",
"azure.servicebus._pyamqp.aio._cbs_async",
"azure.servicebus._pyamqp.aio._connection_async",
"azure.servicebus._pyamqp.aio._link_async",
"azure.servicebus._pyamqp.aio._management_link_async",
"azure.servicebus._pyamqp.aio._session_async"
"azure.servicebus._pyamqp.aio._session_async",
"azure.servicebus._pyamqp.aio._client_async"
]


def disable_unwanted_loggers():
    """
    Disables the unwanted loggers.

    Loggers listed in LOGGERS_FOR_ERRORS_ONLY are restricted to ERROR level;
    loggers listed in UNWANTED_LOGGERS are disabled entirely.
    """
    # Quieten chatty libraries down to errors only.
    for logger_name in LOGGERS_FOR_ERRORS_ONLY:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    # Fully disable loggers we never want output from.
    # (Previously this loop was duplicated; disabling is idempotent, so one pass suffices.)
    for logger_name in UNWANTED_LOGGERS:
        logging.getLogger(logger_name).disabled = True


def telemetry_processor_callback_function(envelope):
envelope.tags['ai.cloud.role'] = 'api'
Expand Down
23 changes: 7 additions & 16 deletions e2e_tests/airlock/request.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import os
from urllib.parse import urlparse
import mimetypes
from azure.storage.blob import ContentSettings
from azure.storage.blob import BlobClient, ContentSettings
from airlock import strings
from e2e_tests.helpers import get_auth_header, get_full_endpoint

Expand Down Expand Up @@ -66,21 +66,12 @@ async def upload_blob_using_sas(file_path: str, sas_url: str):

blob_url = f"{storage_account_url}{container_name}/{file_name}?{parsed_sas_url.query}"
LOGGER.info(f"uploading [{file_name}] to container [{blob_url}]")
with open(file_path, "rb") as fh:
headers = {"x-ms-blob-type": "BlockBlob"}
content_type = ""
if file_ext != "":
content_type = ContentSettings(
content_type=mimetypes.types_map[file_ext]
).content_type

response = await client.put(
url=blob_url,
files={'upload-file': (file_name, fh, content_type)},
headers=headers
)
LOGGER.info(f"response code: {response.status_code}")
return response

client = BlobClient.from_blob_url(blob_url)
with open(file_name,'rb') as data:
response = client.upload_blob(data)

return response


async def wait_for_status(
Expand Down
2 changes: 1 addition & 1 deletion e2e_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ async def clean_up_test_workspace_service(pre_created_workspace_service_id: str,
@pytest.fixture(scope="session")
async def setup_test_workspace(verify) -> Tuple[str, str, str]:
pre_created_workspace_id = config.TEST_WORKSPACE_ID
# Set up
# Set up - uses a pre-created app registration, as it has the appropriate roles assigned
workspace_path, workspace_id = await create_or_get_test_workspace(
auth_type="Manual", verify=verify, pre_created_workspace_id=pre_created_workspace_id, client_id=config.TEST_WORKSPACE_APP_ID, client_secret=config.TEST_WORKSPACE_APP_SECRET)

Expand Down
20 changes: 13 additions & 7 deletions e2e_tests/test_airlock.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,16 +51,22 @@ async def submit_airlock_import_request(workspace_path: str, workspace_owner_tok
wait_time = 30
while not blob_uploaded:
LOGGER.info(f"try #{i} to upload a blob to container [{container_url}]")
upload_response = await upload_blob_using_sas(BLOB_FILE_PATH, container_url)

if upload_response.status_code == 404:
try:
await asyncio.sleep(5)
upload_response = await upload_blob_using_sas(BLOB_FILE_PATH, container_url)
if "etag" in upload_response:
blob_uploaded = True
else:
raise Exception("upload failed")
except ResourceNotFoundError:
i += 1
LOGGER.info(f"sleeping for {wait_time} sec until container would be created")
await asyncio.sleep(wait_time)
else:
assert upload_response.status_code == 201
LOGGER.info("upload blob succeeded")
blob_uploaded = True
pass
except Exception as e:
LOGGER.error(f"upload blob failed with exception: {e}")
raise e


# submit request
LOGGER.info("Submitting airlock request")
Expand Down

0 comments on commit 926f00e

Please sign in to comment.