diff --git a/CHANGELOG.md b/CHANGELOG.md index e1128ce44e9..4298915a309 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [1.6.0] - 2020-09-22 + +### Added +- **Metrics**: Support adding multiple metric values to a single metric name +- **Utilities**: Add new `Validator` utility to validate inbound events and responses using JSON Schema +- **Utilities**: Add new `Event source data classes` utility to easily describe the event schemas of popular event sources +- **Docs**: Add new `Testing your code` section to both Logger and Metrics pages, and content width is now wider +- **Tracer**: Support for automatically disabling Tracer when running a Chalice app + +### Fixed +- **Docs**: Improve wording on log sampling feature in Logger, and remove duplicate content on main page +- **Utilities**: Remove DeleteMessageBatch API call when there are no messages to delete + ## [1.5.0] - 2020-09-04 ### Added diff --git a/Makefile b/Makefile index 20da3040bb9..2b841f362c7 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ target: dev: pip install --upgrade pip poetry pre-commit - poetry install + poetry install --extras "jmespath" pre-commit install dev-docs: diff --git a/README.md b/README.md index 10b5a18488b..9d0140dc049 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ ![Build](https://github.com/awslabs/aws-lambda-powertools/workflows/Powertools%20Python/badge.svg?branch=master) ![PythonSupport](https://img.shields.io/static/v1?label=python&message=3.6%20|%203.7|%203.8&color=blue?style=flat-square&logo=python) ![PyPI version](https://badge.fury.io/py/aws-lambda-powertools.svg) ![PyPi monthly downloads](https://img.shields.io/pypi/dm/aws-lambda-powertools) -A suite of utilities for AWS Lambda functions that makes tracing with AWS X-Ray, structured logging and creating custom metrics asynchronously easier. +A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. 
**[📜Documentation](https://awslabs.github.io/aws-lambda-powertools-python/)** | **[API Docs](https://awslabs.github.io/aws-lambda-powertools-python/api/)** | **[🐍PyPi](https://pypi.org/project/aws-lambda-powertools/)** | **[Feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=)** | **[🐛Bug Report](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=bug%2C+triage&template=bug_report.md&title=)** | **[Kitchen sink example](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/example)** | **[Detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/)** diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index bff0c84e03f..5b04e8a3f67 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -4,6 +4,7 @@ import numbers import os import pathlib +from collections import defaultdict from enum import Enum from typing import Any, Dict, List, Union @@ -79,7 +80,7 @@ class MetricManager: def __init__( self, - metric_set: Dict[str, str] = None, + metric_set: Dict[str, Any] = None, dimension_set: Dict = None, namespace: str = None, metadata_set: Dict[str, Any] = None, @@ -93,7 +94,7 @@ def __init__( self._metric_unit_options = list(MetricUnit.__members__) self.metadata_set = self.metadata_set if metadata_set is not None else {} - def add_metric(self, name: str, unit: MetricUnit, value: Union[float, int]): + def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): """Adds given metric Example @@ -110,9 +111,9 @@ def add_metric(self, name: str, unit: MetricUnit, value: Union[float, int]): ---------- name : str Metric name - unit : MetricUnit + unit : Union[MetricUnit, str] `aws_lambda_powertools.helper.models.MetricUnit` - value : Union[float, int] + value : float Metric value Raises @@ -124,7 +125,9 @@ def add_metric(self, name: str, unit: MetricUnit, value: Union[float, int]): raise MetricValueError(f"{value} is not a valid number") unit = self.__extract_metric_unit_value(unit=unit) - metric = {"Unit": unit, "Value": float(value)} + metric: Dict = self.metric_set.get(name, defaultdict(list)) + metric["Unit"] = unit + metric["Value"].append(float(value)) logger.debug(f"Adding metric: {name} with {metric}") self.metric_set[name] = metric @@ -182,7 +185,7 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions}) metric_names_and_units: List[Dict[str, str]] = [] # [ { "Name": "metric_name", "Unit": "Count" } ] - metric_names_and_values: Dict[str, str] = {} # { "metric_name": 1.0 } + metric_names_and_values: Dict[str, float] = {} # { "metric_name": 1.0 } for metric_name in metrics: metric: dict = metrics[metric_name] diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 25caacb651e..0aab57ee39a 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -622,6 +622,7 @@ def _is_tracer_disabled() -> bool: """ logger.debug("Verifying whether Tracing has been disabled") is_lambda_sam_cli = os.getenv("AWS_SAM_LOCAL") + is_chalice_cli = 
os.getenv("AWS_CHALICE_CLI_MODE") env_option = str(os.getenv("POWERTOOLS_TRACE_DISABLED", "false")) disabled_env = strtobool(env_option) @@ -629,7 +630,7 @@ def _is_tracer_disabled() -> bool: logger.debug("Tracing has been disabled via env var POWERTOOLS_TRACE_DISABLED") return disabled_env - if is_lambda_sam_cli: + if is_lambda_sam_cli or is_chalice_cli: logger.debug("Running under SAM CLI env or not in Lambda env; disabling Tracing") return True diff --git a/aws_lambda_powertools/utilities/batch/sqs.py b/aws_lambda_powertools/utilities/batch/sqs.py index 4a4aa9c98b1..7da8ab52288 100644 --- a/aws_lambda_powertools/utilities/batch/sqs.py +++ b/aws_lambda_powertools/utilities/batch/sqs.py @@ -115,7 +115,9 @@ def _clean(self): queue_url = self._get_queue_url() entries_to_remove = self._get_entries_to_clean() - delete_message_response = self.client.delete_message_batch(QueueUrl=queue_url, Entries=entries_to_remove) + delete_message_response = None + if entries_to_remove: + delete_message_response = self.client.delete_message_batch(QueueUrl=queue_url, Entries=entries_to_remove) if self.suppress_exception: logger.debug(f"{len(self.fail_messages)} records failed processing, but exceptions are suppressed") diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py new file mode 100644 index 00000000000..47ca29c2148 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -0,0 +1,24 @@ +from .alb_event import ALBEvent +from .api_gateway_proxy_event import APIGatewayProxyEvent, APIGatewayProxyEventV2 +from .cloud_watch_logs_event import CloudWatchLogsEvent +from .dynamo_db_stream_event import DynamoDBStreamEvent +from .event_bridge_event import EventBridgeEvent +from .kinesis_stream_event import KinesisStreamEvent +from .s3_event import S3Event +from .ses_event import SESEvent +from .sns_event import SNSEvent +from .sqs_event import SQSEvent + +__all__ = [ + "APIGatewayProxyEvent", + "APIGatewayProxyEventV2", + "ALBEvent", + "CloudWatchLogsEvent", + "DynamoDBStreamEvent", + "EventBridgeEvent", + "KinesisStreamEvent", + "S3Event", + "SESEvent", + "SNSEvent", + "SQSEvent", +] diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py new file mode 100644 index 00000000000..6c7cb9e60c3 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -0,0 +1,38 @@ +from typing import Dict, List, Optional + +from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper + + +class ALBEventRequestContext(DictWrapper): + @property + def elb_target_group_arn(self) -> str: + return self["requestContext"]["elb"]["targetGroupArn"] + + +class ALBEvent(BaseProxyEvent): + """Application load balancer event + + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/services-alb.html + """ + + @property + def request_context(self) -> ALBEventRequestContext: + return ALBEventRequestContext(self._data) + + @property + def http_method(self) -> str: + return self["httpMethod"] + + @property + def path(self) -> str: + return self["path"] + + @property + def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: + return self.get("multiValueQueryStringParameters") + + @property + def multi_value_headers(self) -> Optional[Dict[str, List[str]]]: + return self.get("multiValueHeaders") diff --git 
a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py new file mode 100644 index 00000000000..a7d75eadaa6 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -0,0 +1,382 @@ +from typing import Any, Dict, List, Optional + +from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper + + +class APIGatewayEventIdentity(DictWrapper): + @property + def access_key(self) -> Optional[str]: + return self["requestContext"]["identity"].get("accessKey") + + @property + def account_id(self) -> Optional[str]: + """The AWS account ID associated with the request.""" + return self["requestContext"]["identity"].get("accountId") + + @property + def api_key(self) -> Optional[str]: + """For API methods that require an API key, this variable is the API key associated with the method request. + For methods that don't require an API key, this variable is null. """ + return self["requestContext"]["identity"].get("apiKey") + + @property + def api_key_id(self) -> Optional[str]: + """The API key ID associated with an API request that requires an API key.""" + return self["requestContext"]["identity"].get("apiKeyId") + + @property + def caller(self) -> Optional[str]: + """The principal identifier of the caller making the request.""" + return self["requestContext"]["identity"].get("caller") + + @property + def cognito_authentication_provider(self) -> Optional[str]: + """A comma-separated list of the Amazon Cognito authentication providers used by the caller + making the request. Available only if the request was signed with Amazon Cognito credentials.""" + return self["requestContext"]["identity"].get("cognitoAuthenticationProvider") + + @property + def cognito_authentication_type(self) -> Optional[str]: + """The Amazon Cognito authentication type of the caller making the request. + Available only if the request was signed with Amazon Cognito credentials.""" + return self["requestContext"]["identity"].get("cognitoAuthenticationType") + + @property + def cognito_identity_id(self) -> Optional[str]: + """The Amazon Cognito identity ID of the caller making the request. + Available only if the request was signed with Amazon Cognito credentials.""" + return self["requestContext"]["identity"].get("cognitoIdentityId") + + @property + def cognito_identity_pool_id(self) -> Optional[str]: + """The Amazon Cognito identity pool ID of the caller making the request. 
+ Available only if the request was signed with Amazon Cognito credentials.""" + return self["requestContext"]["identity"].get("cognitoIdentityPoolId") + + @property + def principal_org_id(self) -> Optional[str]: + """The AWS organization ID.""" + return self["requestContext"]["identity"].get("principalOrgId") + + @property + def source_ip(self) -> str: + """The source IP address of the TCP connection making the request to API Gateway.""" + return self["requestContext"]["identity"]["sourceIp"] + + @property + def user(self) -> Optional[str]: + """The principal identifier of the user making the request.""" + return self["requestContext"]["identity"].get("user") + + @property + def user_agent(self) -> Optional[str]: + """The User Agent of the API caller.""" + return self["requestContext"]["identity"].get("userAgent") + + @property + def user_arn(self) -> Optional[str]: + """The Amazon Resource Name (ARN) of the effective user identified after authentication.""" + return self["requestContext"]["identity"].get("userArn") + + +class APIGatewayEventAuthorizer(DictWrapper): + @property + def claims(self) -> Optional[Dict[str, Any]]: + return self["requestContext"]["authorizer"].get("claims") + + @property + def scopes(self) -> Optional[List[str]]: + return self["requestContext"]["authorizer"].get("scopes") + + +class APIGatewayEventRequestContext(DictWrapper): + @property + def account_id(self) -> str: + """The AWS account ID associated with the request.""" + return self["requestContext"]["accountId"] + + @property + def api_id(self) -> str: + """The identifier API Gateway assigns to your API.""" + return self["requestContext"]["apiId"] + + @property + def authorizer(self) -> APIGatewayEventAuthorizer: + return APIGatewayEventAuthorizer(self._data) + + @property + def connected_at(self) -> Optional[int]: + """The Epoch-formatted connection time. (WebSocket API)""" + return self["requestContext"].get("connectedAt") + + @property + def connection_id(self) -> Optional[str]: + """A unique ID for the connection that can be used to make a callback to the client. (WebSocket API)""" + return self["requestContext"].get("connectionId") + + @property + def domain_name(self) -> Optional[str]: + """A domain name""" + return self["requestContext"].get("domainName") + + @property + def domain_prefix(self) -> Optional[str]: + return self["requestContext"].get("domainPrefix") + + @property + def event_type(self) -> Optional[str]: + """The event type: `CONNECT`, `MESSAGE`, or `DISCONNECT`. (WebSocket API)""" + return self["requestContext"].get("eventType") + + @property + def extended_request_id(self) -> Optional[str]: + """An automatically generated ID for the API call, which contains more useful information + for debugging/troubleshooting.""" + return self["requestContext"].get("extendedRequestId") + + @property + def protocol(self) -> str: + """The request protocol, for example, HTTP/1.1.""" + return self["requestContext"]["protocol"] + + @property + def http_method(self) -> str: + """The HTTP method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT.""" + return self["requestContext"]["httpMethod"] + + @property + def identity(self) -> APIGatewayEventIdentity: + return APIGatewayEventIdentity(self._data) + + @property + def message_direction(self) -> Optional[str]: + """Message direction (WebSocket API)""" + return self["requestContext"].get("messageDirection") + + @property + def message_id(self) -> Optional[str]: + """A unique server-side ID for a message. 
Available only when the `eventType` is `MESSAGE`.""" + return self["requestContext"].get("messageId") + + @property + def path(self) -> str: + return self["requestContext"]["path"] + + @property + def stage(self) -> str: + """The deployment stage of the API request """ + return self["requestContext"]["stage"] + + @property + def request_id(self) -> str: + """The ID that API Gateway assigns to the API request.""" + return self["requestContext"]["requestId"] + + @property + def request_time(self) -> Optional[str]: + """The CLF-formatted request time (dd/MMM/yyyy:HH:mm:ss +-hhmm)""" + return self["requestContext"].get("requestTime") + + @property + def request_time_epoch(self) -> int: + """The Epoch-formatted request time.""" + return self["requestContext"]["requestTimeEpoch"] + + @property + def resource_id(self) -> str: + return self["requestContext"]["resourceId"] + + @property + def resource_path(self) -> str: + return self["requestContext"]["resourcePath"] + + @property + def route_key(self) -> Optional[str]: + """The selected route key.""" + return self["requestContext"].get("routeKey") + + +class APIGatewayProxyEvent(BaseProxyEvent): + """AWS Lambda proxy V1 + + Documentation: + -------------- + - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + """ + + @property + def version(self) -> str: + return self["version"] + + @property + def resource(self) -> str: + return self["resource"] + + @property + def path(self) -> str: + return self["path"] + + @property + def http_method(self) -> str: + """The HTTP method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT.""" + return self["httpMethod"] + + @property + def multi_value_headers(self) -> Dict[str, List[str]]: + return self["multiValueHeaders"] + + @property + def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: + return self.get("multiValueQueryStringParameters") + + @property + def request_context(self) -> APIGatewayEventRequestContext: + return APIGatewayEventRequestContext(self._data) + + @property + def path_parameters(self) -> Optional[Dict[str, str]]: + return self.get("pathParameters") + + @property + def stage_variables(self) -> Optional[Dict[str, str]]: + return self.get("stageVariables") + + +class RequestContextV2Http(DictWrapper): + @property + def method(self) -> str: + return self["requestContext"]["http"]["method"] + + @property + def path(self) -> str: + return self["requestContext"]["http"]["path"] + + @property + def protocol(self) -> str: + """The request protocol, for example, HTTP/1.1.""" + return self["requestContext"]["http"]["protocol"] + + @property + def source_ip(self) -> str: + """The source IP address of the TCP connection making the request to API Gateway.""" + return self["requestContext"]["http"]["sourceIp"] + + @property + def user_agent(self) -> str: + """The User Agent of the API caller.""" + return self["requestContext"]["http"]["userAgent"] + + +class RequestContextV2Authorizer(DictWrapper): + @property + def jwt_claim(self) -> Dict[str, Any]: + return self["jwt"]["claims"] + + @property + def jwt_scopes(self) -> List[str]: + return self["jwt"]["scopes"] + + +class RequestContextV2(DictWrapper): + @property + def account_id(self) -> str: + """The AWS account ID associated with the request.""" + return self["requestContext"]["accountId"] + + @property + def api_id(self) -> str: + """The identifier API Gateway assigns to your API.""" + return 
self["requestContext"]["apiId"] + + @property + def authorizer(self) -> Optional[RequestContextV2Authorizer]: + authorizer = self["requestContext"].get("authorizer") + return None if authorizer is None else RequestContextV2Authorizer(authorizer) + + @property + def domain_name(self) -> str: + """A domain name """ + return self["requestContext"]["domainName"] + + @property + def domain_prefix(self) -> str: + return self["requestContext"]["domainPrefix"] + + @property + def http(self) -> RequestContextV2Http: + return RequestContextV2Http(self._data) + + @property + def request_id(self) -> str: + """The ID that API Gateway assigns to the API request.""" + return self["requestContext"]["requestId"] + + @property + def route_key(self) -> str: + """The selected route key.""" + return self["requestContext"]["routeKey"] + + @property + def stage(self) -> str: + """The deployment stage of the API request """ + return self["requestContext"]["stage"] + + @property + def time(self) -> str: + """The CLF-formatted request time (dd/MMM/yyyy:HH:mm:ss +-hhmm).""" + return self["requestContext"]["time"] + + @property + def time_epoch(self) -> int: + """The Epoch-formatted request time.""" + return self["requestContext"]["timeEpoch"] + + +class APIGatewayProxyEventV2(BaseProxyEvent): + """AWS Lambda proxy V2 event + + Notes: + ----- + Format 2.0 doesn't have multiValueHeaders or multiValueQueryStringParameters fields. Duplicate headers + are combined with commas and included in the headers field. Duplicate query strings are combined with + commas and included in the queryStringParameters field. + + Format 2.0 includes a new cookies field. All cookie headers in the request are combined with commas and + added to the cookies field. In the response to the client, each cookie becomes a set-cookie header. 
+ + Documentation: + -------------- + - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + """ + + @property + def version(self) -> str: + return self["version"] + + @property + def route_key(self) -> str: + return self["routeKey"] + + @property + def raw_path(self) -> str: + return self["rawPath"] + + @property + def raw_query_string(self) -> str: + return self["rawQueryString"] + + @property + def cookies(self) -> Optional[List[str]]: + return self.get("cookies") + + @property + def request_context(self) -> RequestContextV2: + return RequestContextV2(self._data) + + @property + def path_parameters(self) -> Optional[Dict[str, str]]: + return self.get("pathParameters") + + @property + def stage_variables(self) -> Optional[Dict[str, str]]: + return self.get("stageVariables") diff --git a/aws_lambda_powertools/utilities/data_classes/cloud_watch_logs_event.py b/aws_lambda_powertools/utilities/data_classes/cloud_watch_logs_event.py new file mode 100644 index 00000000000..978f6956fc2 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/cloud_watch_logs_event.py @@ -0,0 +1,101 @@ +import base64 +import json +import zlib +from typing import Dict, List, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class CloudWatchLogsLogEvent(DictWrapper): + @property + def get_id(self) -> str: + """The ID property is a unique identifier for every log event.""" + # Note: this name conflicts with existing python builtins + return self["id"] + + @property + def timestamp(self) -> int: + """Get the `timestamp` property""" + return self["timestamp"] + + @property + def message(self) -> str: + """Get the `message` property""" + return self["message"] + + @property + def extracted_fields(self) -> Optional[Dict[str, str]]: + """Get the `extractedFields` property""" + return self.get("extractedFields") + + +class CloudWatchLogsDecodedData(DictWrapper): + @property + def owner(self) -> str: + """The AWS Account ID of the originating log data.""" + return self["owner"] + + @property + def log_group(self) -> str: + """The log group name of the originating log data.""" + return self["logGroup"] + + @property + def log_stream(self) -> str: + """The log stream name of the originating log data.""" + return self["logStream"] + + @property + def subscription_filters(self) -> List[str]: + """The list of subscription filter names that matched with the originating log data.""" + return self["subscriptionFilters"] + + @property + def message_type(self) -> str: + """Data messages will use the "DATA_MESSAGE" type. + + Sometimes CloudWatch Logs may emit Kinesis records with a "CONTROL_MESSAGE" type, + mainly for checking if the destination is reachable. + """ + return self["messageType"] + + @property + def log_events(self) -> List[CloudWatchLogsLogEvent]: + """The actual log data, represented as an array of log event records. + + The ID property is a unique identifier for every log event. + """ + return [CloudWatchLogsLogEvent(i) for i in self["logEvents"]] + + +class CloudWatchLogsEvent(DictWrapper): + """CloudWatch Logs log stream event + + You can use a Lambda function to monitor and analyze logs from an Amazon CloudWatch Logs log stream. 
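+
+    Example
+    -------
+    **Decode, decompress and iterate over log events (illustrative sketch)**
+
+        from aws_lambda_powertools.utilities.data_classes import CloudWatchLogsEvent
+
+        def lambda_handler(event: dict, context):
+            event = CloudWatchLogsEvent(event)
+            decoded = event.parse_logs_data()
+            for log_event in decoded.log_events:
+                # each entry exposes `timestamp`, `message` and optional `extracted_fields`
+                print(log_event.message)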
+ + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/services-cloudwatchlogs.html + """ + + _decompressed_logs_data = None + _json_logs_data = None + + @property + def raw_logs_data(self) -> str: + """The value of the `data` field is a Base64-encoded, gzip-compressed payload.""" + return self["awslogs"]["data"] + + @property + def decompress_logs_data(self) -> bytes: + """Decode and decompress log data""" + if self._decompressed_logs_data is None: + payload = base64.b64decode(self.raw_logs_data) + self._decompressed_logs_data = zlib.decompress(payload, zlib.MAX_WBITS | 32) + return self._decompressed_logs_data + + def parse_logs_data(self) -> CloudWatchLogsDecodedData: + """Decode, decompress and parse json data as CloudWatchLogsDecodedData""" + if self._json_logs_data is None: + self._json_logs_data = json.loads(self.decompress_logs_data.decode("UTF-8")) + return CloudWatchLogsDecodedData(self._json_logs_data) diff --git a/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py b/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py new file mode 100644 index 00000000000..9364839eb93 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py @@ -0,0 +1,560 @@ +from typing import Any, Dict, List, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class CallerContext(DictWrapper): + @property + def aws_sdk_version(self) -> str: + """The AWS SDK version number.""" + return self["callerContext"]["awsSdkVersion"] + + @property + def client_id(self) -> str: + """The ID of the client associated with the user pool.""" + return self["callerContext"]["clientId"] + + +class BaseTriggerEvent(DictWrapper): + """Common attributes shared by all User Pool Lambda Trigger Events + + Documentation: + ------------- + https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html + """ + + @property + def version(self) -> str: + """The version number of your Lambda function.""" + return self["version"] + + @property + def region(self) -> str: + """The AWS Region, as an AWSRegion instance.""" + return self["region"] + + @property + def user_pool_id(self) -> str: + """The user pool ID for the user pool.""" + return self["userPoolId"] + + @property + def trigger_source(self) -> str: + """The name of the event that triggered the Lambda function.""" + return self["triggerSource"] + + @property + def user_name(self) -> str: + """The username of the current user.""" + return self["userName"] + + @property + def caller_context(self) -> CallerContext: + """The caller context""" + return CallerContext(self._data) + + +class PreSignUpTriggerEventRequest(DictWrapper): + @property + def user_attributes(self) -> Dict[str, str]: + """One or more name-value pairs representing user attributes. 
The attribute names are the keys.""" + return self["request"]["userAttributes"] + + @property + def validation_data(self) -> Optional[Dict[str, str]]: + """One or more name-value pairs containing the validation data in the request to register a user.""" + return self["request"].get("validationData") + + @property + def client_metadata(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs that you can provide as custom input to the Lambda function + that you specify for the pre sign-up trigger.""" + return self["request"].get("clientMetadata") + + +class PreSignUpTriggerEventResponse(DictWrapper): + @property + def auto_confirm_user(self) -> bool: + return bool(self["response"]["autoConfirmUser"]) + + @auto_confirm_user.setter + def auto_confirm_user(self, value: bool): + """Set to true to auto-confirm the user, or false otherwise.""" + self["response"]["autoConfirmUser"] = value + + @property + def auto_verify_email(self) -> bool: + return bool(self["response"]["autoVerifyEmail"]) + + @auto_verify_email.setter + def auto_verify_email(self, value: bool): + """Set to true to set as verified the email of a user who is signing up, or false otherwise.""" + self["response"]["autoVerifyEmail"] = value + + @property + def auto_verify_phone(self) -> bool: + return bool(self["response"]["autoVerifyPhone"]) + + @auto_verify_phone.setter + def auto_verify_phone(self, value: bool): + """Set to true to set as verified the phone number of a user who is signing up, or false otherwise.""" + self["response"]["autoVerifyPhone"] = value + + +class PreSignUpTriggerEvent(BaseTriggerEvent): + """Pre Sign-up Lambda Trigger + + Notes: + ---- + `triggerSource` can be one of the following: + + - `PreSignUp_SignUp` Pre sign-up. + - `PreSignUp_AdminCreateUser` Pre sign-up when an admin creates a new user. + - `PreSignUp_ExternalProvider` Pre sign-up with an external provider. + + Documentation: + ------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-sign-up.html + """ + + @property + def request(self) -> PreSignUpTriggerEventRequest: + return PreSignUpTriggerEventRequest(self._data) + + @property + def response(self) -> PreSignUpTriggerEventResponse: + return PreSignUpTriggerEventResponse(self._data) + + +class PostConfirmationTriggerEventRequest(DictWrapper): + @property + def user_attributes(self) -> Dict[str, str]: + """One or more name-value pairs representing user attributes. The attribute names are the keys.""" + return self["request"]["userAttributes"] + + @property + def client_metadata(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs that you can provide as custom input to the Lambda function + that you specify for the post confirmation trigger.""" + return self["request"].get("clientMetadata") + + +class PostConfirmationTriggerEvent(BaseTriggerEvent): + """Post Confirmation Lambda Trigger + + Notes: + ---- + `triggerSource` can be one of the following: + + - `PostConfirmation_ConfirmSignUp` Post sign-up confirmation. + - `PostConfirmation_ConfirmForgotPassword` Post Forgot Password confirmation. 
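+
+    Example
+    -------
+    **Read a user attribute after confirmation (illustrative sketch; the `email` attribute is an assumption)**
+
+        from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent
+
+        def lambda_handler(event: dict, context):
+            wrapped = PostConfirmationTriggerEvent(event)
+            user_email = wrapped.request.user_attributes.get("email")
+            # e.g. send a welcome message to user_email here
+            return event  # Cognito expects the event to be returned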
+ + Documentation: + ------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-post-confirmation.html + """ + + @property + def request(self) -> PostConfirmationTriggerEventRequest: + return PostConfirmationTriggerEventRequest(self._data) + + +class UserMigrationTriggerEventRequest(DictWrapper): + @property + def password(self) -> str: + return self["request"]["password"] + + @property + def validation_data(self) -> Optional[Dict[str, str]]: + """One or more name-value pairs containing the validation data in the request to register a user.""" + return self["request"].get("validationData") + + @property + def client_metadata(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs that you can provide as custom input to the Lambda function + that you specify for the user migration trigger.""" + return self["request"].get("clientMetadata") + + +class UserMigrationTriggerEventResponse(DictWrapper): + @property + def user_attributes(self) -> Dict[str, str]: + return self["response"]["userAttributes"] + + @user_attributes.setter + def user_attributes(self, value: Dict[str, str]): + """It must contain one or more name-value pairs representing user attributes to be stored in the + user profile in your user pool. You can include both standard and custom user attributes. + Custom attributes require the custom: prefix to distinguish them from standard attributes.""" + self["response"]["userAttributes"] = value + + @property + def final_user_status(self) -> Optional[str]: + return self["response"].get("finalUserStatus") + + @final_user_status.setter + def final_user_status(self, value: str): + """During sign-in, this attribute can be set to CONFIRMED, or not set, to auto-confirm your users and + allow them to sign in with their previous passwords. This is the simplest experience for the user. + + If this attribute is set to RESET_REQUIRED, the user is required to change his or her password immediately + after migration at the time of sign-in, and your client app needs to handle the PasswordResetRequiredException + during the authentication flow.""" + self["response"]["finalUserStatus"] = value + + @property + def message_action(self) -> Optional[str]: + return self["response"].get("messageAction") + + @message_action.setter + def message_action(self, value: str): + """This attribute can be set to "SUPPRESS" to suppress the welcome message usually sent by + Amazon Cognito to new users. If this attribute is not returned, the welcome message will be sent.""" + self["response"]["messageAction"] = value + + @property + def desired_delivery_mediums(self) -> Optional[List[str]]: + return self["response"].get("desiredDeliveryMediums") + + @desired_delivery_mediums.setter + def desired_delivery_mediums(self, value: List[str]): + """This attribute can be set to "EMAIL" to send the welcome message by email, or "SMS" to send the + welcome message by SMS. If this attribute is not returned, the welcome message will be sent by SMS.""" + self["response"]["desiredDeliveryMediums"] = value + + @property + def force_alias_creation(self) -> Optional[bool]: + return self["response"].get("forceAliasCreation") + + @force_alias_creation.setter + def force_alias_creation(self, value: bool): + """If this parameter is set to "true" and the phone number or email address specified in the UserAttributes + parameter already exists as an alias with a different user, the API call will migrate the alias from the + previous user to the newly created user. 
The previous user will no longer be able to log in using that alias. + + If this attribute is set to "false" and the alias exists, the user will not be migrated, and an error is + returned to the client app. + + If this attribute is not returned, it is assumed to be "false". + """ + self["response"]["forceAliasCreation"] = value + + +class UserMigrationTriggerEvent(BaseTriggerEvent): + """Migrate User Lambda Trigger + + Notes: + ---- + `triggerSource` can be one of the following: + + - `UserMigration_Authentication` User migration at the time of sign in. + - `UserMigration_ForgotPassword` User migration during forgot-password flow. + + Documentation: + ------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-migrate-user.html + """ + + @property + def request(self) -> UserMigrationTriggerEventRequest: + return UserMigrationTriggerEventRequest(self._data) + + @property + def response(self) -> UserMigrationTriggerEventResponse: + return UserMigrationTriggerEventResponse(self._data) + + +class CustomMessageTriggerEventRequest(DictWrapper): + @property + def code_parameter(self) -> str: + """A string for you to use as the placeholder for the verification code in the custom message.""" + return self["request"]["codeParameter"] + + @property + def username_parameter(self) -> str: + """The username parameter. It is a required request parameter for the admin create user flow.""" + return self["request"]["usernameParameter"] + + @property + def user_attributes(self) -> Dict[str, str]: + """One or more name-value pairs representing user attributes. The attribute names are the keys.""" + return self["request"]["userAttributes"] + + @property + def client_metadata(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs that you can provide as custom input to the Lambda function + that you specify for the custom message trigger.""" + return self["request"].get("clientMetadata") + + +class CustomMessageTriggerEventResponse(DictWrapper): + @property + def sms_message(self) -> str: + return self["response"]["smsMessage"] + + @property + def email_message(self) -> str: + return self["response"]["emailMessage"] + + @property + def email_subject(self) -> str: + return self["response"]["emailSubject"] + + @sms_message.setter + def sms_message(self, value: str): + """The custom SMS message to be sent to your users. + Must include the codeParameter value received in the request.""" + self["response"]["smsMessage"] = value + + @email_message.setter + def email_message(self, value: str): + """The custom email message to be sent to your users. + Must include the codeParameter value received in the request.""" + self["response"]["emailMessage"] = value + + @email_subject.setter + def email_subject(self, value: str): + """The subject line for the custom message.""" + self["response"]["emailSubject"] = value + + +class CustomMessageTriggerEvent(BaseTriggerEvent): + """Custom Message Lambda Trigger + + Notes: + ---- + `triggerSource` can be one of the following: + + - `CustomMessage_SignUp` To send the confirmation code post sign-up. + - `CustomMessage_AdminCreateUser` To send the temporary password to a new user. + - `CustomMessage_ResendCode` To resend the confirmation code to an existing user. + - `CustomMessage_ForgotPassword` To send the confirmation code for Forgot Password request. + - `CustomMessage_UpdateUserAttribute` When a user's email or phone number is changed, this trigger sends a + verification code automatically to the user. 
Cannot be used for other attributes. + - `CustomMessage_VerifyUserAttribute` This trigger sends a verification code to the user when they manually + request it for a new email or phone number. + - `CustomMessage_Authentication` To send MFA code during authentication. + + Documentation: + -------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-custom-message.html + """ + + @property + def request(self) -> CustomMessageTriggerEventRequest: + return CustomMessageTriggerEventRequest(self._data) + + @property + def response(self) -> CustomMessageTriggerEventResponse: + return CustomMessageTriggerEventResponse(self._data) + + +class PreAuthenticationTriggerEventRequest(DictWrapper): + @property + def user_not_found(self) -> Optional[bool]: + """This boolean is populated when PreventUserExistenceErrors is set to ENABLED for your User Pool client.""" + return self["request"].get("userNotFound") + + @property + def user_attributes(self) -> Dict[str, str]: + """One or more name-value pairs representing user attributes.""" + return self["request"]["userAttributes"] + + @property + def validation_data(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs containing the validation data in the user's sign-in request.""" + return self["request"].get("validationData") + + +class PreAuthenticationTriggerEvent(BaseTriggerEvent): + """Pre Authentication Lambda Trigger + + Amazon Cognito invokes this trigger when a user attempts to sign in, allowing custom validation + to accept or deny the authentication request. + + Notes: + ---- + `triggerSource` can be one of the following: + + - `PreAuthentication_Authentication` Pre authentication. + + Documentation: + -------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-authentication.html + """ + + @property + def request(self) -> PreAuthenticationTriggerEventRequest: + """Pre Authentication Request Parameters""" + return PreAuthenticationTriggerEventRequest(self._data) + + +class PostAuthenticationTriggerEventRequest(DictWrapper): + @property + def new_device_used(self) -> bool: + """This flag indicates if the user has signed in on a new device. + It is set only if the remembered devices value of the user pool is set to `Always` or User `Opt-In`.""" + return self["request"]["newDeviceUsed"] + + @property + def user_attributes(self) -> Dict[str, str]: + """One or more name-value pairs representing user attributes.""" + return self["request"]["userAttributes"] + + @property + def client_metadata(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs that you can provide as custom input to the Lambda function + that you specify for the post authentication trigger.""" + return self["request"].get("clientMetadata") + + +class PostAuthenticationTriggerEvent(BaseTriggerEvent): + """Post Authentication Lambda Trigger + + Amazon Cognito invokes this trigger after signing in a user, allowing you to add custom logic + after authentication. + + Notes: + ---- + `triggerSource` can be one of the following: + + - `PostAuthentication_Authentication` Post authentication. 
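+
+    Example
+    -------
+    **Inspect the sign-in that just completed (illustrative sketch)**
+
+        from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostAuthenticationTriggerEvent
+
+        def lambda_handler(event: dict, context):
+            wrapped = PostAuthenticationTriggerEvent(event)
+            if wrapped.request.new_device_used:
+                print(f"New device sign-in for {wrapped.user_name}")
+            return event  # Cognito expects the event to be returned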
+ + Documentation: + -------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-post-authentication.html + """ + + @property + def request(self) -> PostAuthenticationTriggerEventRequest: + """Post Authentication Request Parameters""" + return PostAuthenticationTriggerEventRequest(self._data) + + +class GroupOverrideDetails(DictWrapper): + @property + def groups_to_override(self) -> Optional[List[str]]: + """A list of the group names that are associated with the user that the identity token is issued for.""" + return self.get("groupsToOverride") + + @property + def iam_roles_to_override(self) -> Optional[List[str]]: + """A list of the current IAM roles associated with these groups.""" + return self.get("iamRolesToOverride") + + @property + def preferred_role(self) -> Optional[str]: + """A string indicating the preferred IAM role.""" + return self.get("preferredRole") + + +class PreTokenGenerationTriggerEventRequest(DictWrapper): + @property + def group_configuration(self) -> GroupOverrideDetails: + """The input object containing the current group configuration""" + return GroupOverrideDetails(self["request"]["groupConfiguration"]) + + @property + def user_attributes(self) -> Dict[str, str]: + """One or more name-value pairs representing user attributes.""" + return self["request"]["userAttributes"] + + @property + def client_metadata(self) -> Optional[Dict[str, str]]: + """One or more key-value pairs that you can provide as custom input to the Lambda function + that you specify for the pre token generation trigger.""" + return self["request"].get("clientMetadata") + + +class ClaimsOverrideDetails(DictWrapper): + @property + def claims_to_add_or_override(self) -> Optional[Dict[str, str]]: + return self.get("claimsToAddOrOverride") + + @property + def claims_to_suppress(self) -> Optional[List[str]]: + return self.get("claimsToSuppress") + + @property + def group_configuration(self) -> Optional[GroupOverrideDetails]: + group_override_details = self.get("groupOverrideDetails") + return None if group_override_details is None else GroupOverrideDetails(group_override_details) + + @claims_to_add_or_override.setter + def claims_to_add_or_override(self, value: Dict[str, str]): + """A map of one or more key-value pairs of claims to add or override. + For group related claims, use groupOverrideDetails instead.""" + self._data["claimsToAddOrOverride"] = value + + @claims_to_suppress.setter + def claims_to_suppress(self, value: List[str]): + """A list that contains claims to be suppressed from the identity token.""" + self._data["claimsToSuppress"] = value + + @group_configuration.setter + def group_configuration(self, value: Dict[str, Any]): + """The output object containing the current group configuration. + + It includes groupsToOverride, iamRolesToOverride, and preferredRole. + + The groupOverrideDetails object is replaced with the one you provide. If you provide an empty or null + object in the response, then the groups are suppressed. To leave the existing group configuration + as is, copy the value of the request's groupConfiguration object to the groupOverrideDetails object + in the response, and pass it back to the service. 
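+
+        Example (an illustrative sketch; assumes `event` is the raw trigger payload):
+
+            wrapped = PreTokenGenerationTriggerEvent(event)
+            wrapped.response.claims_override_details.group_configuration = event["request"]["groupConfiguration"]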
+ """ + self._data["groupOverrideDetails"] = value + + def set_group_configuration_groups_to_override(self, value: List[str]): + """A list of the group names that are associated with the user that the identity token is issued for.""" + self._data.setdefault("groupOverrideDetails", {}) + self["groupOverrideDetails"]["groupsToOverride"] = value + + def set_group_configuration_iam_roles_to_override(self, value: List[str]): + """A list of the current IAM roles associated with these groups.""" + self._data.setdefault("groupOverrideDetails", {}) + self["groupOverrideDetails"]["iamRolesToOverride"] = value + + def set_group_configuration_preferred_role(self, value: str): + """A string indicating the preferred IAM role.""" + self._data.setdefault("groupOverrideDetails", {}) + self["groupOverrideDetails"]["preferredRole"] = value + + +class PreTokenGenerationTriggerEventResponse(DictWrapper): + @property + def claims_override_details(self) -> ClaimsOverrideDetails: + # Ensure we have a `claimsOverrideDetails` element + self._data["response"].setdefault("claimsOverrideDetails", {}) + return ClaimsOverrideDetails(self._data["response"]["claimsOverrideDetails"]) + + +class PreTokenGenerationTriggerEvent(BaseTriggerEvent): + """Pre Token Generation Lambda Trigger + + Amazon Cognito invokes this data_classes before token generation allowing you to customize identity token claims. + + Notes: + ---- + `triggerSource` can be one of the following: + + - `TokenGeneration_HostedAuth` Called during authentication from the Amazon Cognito hosted UI sign-in page. + - `TokenGeneration_Authentication` Called after user authentication flows have completed. + - `TokenGeneration_NewPasswordChallenge` Called after the user is created by an admin. This flow is invoked + when the user has to change a temporary password. + - `TokenGeneration_AuthenticateDevice` Called at the end of the authentication of a user device. + - `TokenGeneration_RefreshTokens` Called when a user tries to refresh the identity and access tokens. 
+ + Documentation: + -------------- + - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-token-generation.html + """ + + @property + def request(self) -> PreTokenGenerationTriggerEventRequest: + """Pre Token Generation Request Parameters""" + return PreTokenGenerationTriggerEventRequest(self._data) + + @property + def response(self) -> PreTokenGenerationTriggerEventResponse: + """Pre Token Generation Response Parameters""" + return PreTokenGenerationTriggerEventResponse(self._data) diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py new file mode 100644 index 00000000000..73cf1b339ff --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -0,0 +1,65 @@ +from typing import Any, Dict, Optional + + +class DictWrapper: + """Provides read-only access to a wrapped dict""" + + def __init__(self, data: Dict[str, Any]): + self._data = data + + def __getitem__(self, key: str) -> Any: + return self._data[key] + + def get(self, key: str) -> Optional[Any]: + return self._data.get(key) + + +class BaseProxyEvent(DictWrapper): + @property + def headers(self) -> Dict[str, str]: + return self["headers"] + + @property + def query_string_parameters(self) -> Optional[Dict[str, str]]: + return self.get("queryStringParameters") + + @property + def is_base64_encoded(self) -> bool: + return self.get("isBase64Encoded") + + @property + def body(self) -> Optional[str]: + return self.get("body") + + def get_query_string_value(self, name: str, default_value: Optional[str] = None) -> Optional[str]: + """Get query string value by name + + Parameters + ---------- + name: str + Query string parameter name + default_value: str, optional + Default value if no value was found by name + Returns + ------- + str, optional + Query string parameter value + """ + params = self.query_string_parameters + return default_value if params is None else params.get(name, default_value) + + def get_header_value(self, name: str, default_value: Optional[str] = None) -> Optional[str]: + """Get header value by name + + Parameters + ---------- + name: str + Header name + default_value: str, optional + Default value if no value was found by name + Returns + ------- + str, optional + Header value + """ + return self.headers.get(name, default_value) diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py new file mode 100644 index 00000000000..db581ceaf7d --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py @@ -0,0 +1,232 @@ +from enum import Enum +from typing import Dict, Iterator, List, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class AttributeValue(DictWrapper): + """Represents the data for an attribute + + Documentation: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html + """ + + @property + def b_value(self) -> Optional[str]: + """An attribute of type Base64-encoded binary data object + + Example: + >>> {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"} + """ + return self.get("B") + + @property + def bs_value(self) -> Optional[List[str]]: + """An attribute of type Array of Base64-encoded binary data objects + + Example: + >>> {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]} + """ + return self.get("BS") + + @property + def bool_value(self) -> 
Optional[bool]: + """An attribute of type Boolean + + Example: + >>> {"BOOL": True} + """ + item = self.get("BOOL") + return None if item is None else bool(item) + + @property + def list_value(self) -> Optional[List["AttributeValue"]]: + """An attribute of type Array of AttributeValue objects + + Example: + >>> {"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N": "3.14159"}]} + """ + item = self.get("L") + return None if item is None else [AttributeValue(v) for v in item] + + @property + def map_value(self) -> Optional[Dict[str, "AttributeValue"]]: + """An attribute of type String to AttributeValue object map + + Example: + >>> {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}} + """ + return _attribute_value_dict(self._data, "M") + + @property + def n_value(self) -> Optional[str]: + """An attribute of type Number + + Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages + and libraries. However, DynamoDB treats them as number type attributes for mathematical operations. + + Example: + >>> {"N": "123.45"} + """ + return self.get("N") + + @property + def ns_value(self) -> Optional[List[str]]: + """An attribute of type Number Set + + Example: + >>> {"NS": ["42.2", "-19", "7.5", "3.14"]} + """ + return self.get("NS") + + @property + def null_value(self) -> Optional[bool]: + """An attribute of type Null. + + Example: + >>> {"NULL": True} + """ + item = self.get("NULL") + return None if item is None else bool(item) + + @property + def s_value(self) -> Optional[str]: + """An attribute of type String + + Example: + >>> {"S": "Hello"} + """ + return self.get("S") + + @property + def ss_value(self) -> Optional[List[str]]: + """An attribute of type Array of strings + + Example: + >>> {"SS": ["Giraffe", "Hippo" ,"Zebra"]} + """ + return self.get("SS") + + +def _attribute_value_dict(attr_values: Dict[str, dict], key: str) -> Optional[Dict[str, AttributeValue]]: + """A dict of type String to AttributeValue object map + + Example: + >>> {"NewImage": {"Id": {"S": "xxx-xxx"}, "Value": {"N": "35"}}} + """ + attr_values_dict = attr_values.get(key) + return None if attr_values_dict is None else {k: AttributeValue(v) for k, v in attr_values_dict.items()} + + +class StreamViewType(Enum): + """The type of data from the modified DynamoDB item that was captured in this stream record""" + + KEYS_ONLY = 0 # only the key attributes of the modified item + NEW_IMAGE = 1 # the entire item, as it appeared after it was modified. + OLD_IMAGE = 2 # the entire item, as it appeared before it was modified. + NEW_AND_OLD_IMAGES = 3 # both the new and the old item images of the item. 
+ + +class StreamRecord(DictWrapper): + @property + def approximate_creation_date_time(self) -> Optional[int]: + """The approximate date and time when the stream record was created, in UNIX epoch time format.""" + item = self.get("ApproximateCreationDateTime") + return None if item is None else int(item) + + @property + def keys(self) -> Optional[Dict[str, AttributeValue]]: + """The primary key attribute(s) for the DynamoDB item that was modified.""" + return _attribute_value_dict(self._data, "Keys") + + @property + def new_image(self) -> Optional[Dict[str, AttributeValue]]: + """The item in the DynamoDB table as it appeared after it was modified.""" + return _attribute_value_dict(self._data, "NewImage") + + @property + def old_image(self) -> Optional[Dict[str, AttributeValue]]: + """The item in the DynamoDB table as it appeared before it was modified.""" + return _attribute_value_dict(self._data, "OldImage") + + @property + def sequence_number(self) -> Optional[str]: + """The sequence number of the stream record.""" + return self.get("SequenceNumber") + + @property + def size_bytes(self) -> Optional[int]: + """The size of the stream record, in bytes.""" + item = self.get("SizeBytes") + return None if item is None else int(item) + + @property + def stream_view_type(self) -> Optional[StreamViewType]: + """The type of data from the modified DynamoDB item that was captured in this stream record""" + item = self.get("StreamViewType") + return None if item is None else StreamViewType[str(item)] + + +class DynamoDBRecordEventName(Enum): + INSERT = 0 # a new item was added to the table + MODIFY = 1 # one or more of an existing item's attributes were modified + REMOVE = 2 # the item was deleted from the table + + +class DynamoDBRecord(DictWrapper): + """A description of a unique event within a stream""" + + @property + def aws_region(self) -> Optional[str]: + """The region in which the GetRecords request was received""" + return self.get("awsRegion") + + @property + def dynamodb(self) -> Optional[StreamRecord]: + """The main body of the stream record, containing all of the DynamoDB-specific fields.""" + stream_record = self.get("dynamodb") + return None if stream_record is None else StreamRecord(stream_record) + + @property + def event_id(self) -> Optional[str]: + """A globally unique identifier for the event that was recorded in this stream record.""" + return self.get("eventID") + + @property + def event_name(self) -> Optional[DynamoDBRecordEventName]: + """The type of data modification that was performed on the DynamoDB table""" + item = self.get("eventName") + return None if item is None else DynamoDBRecordEventName[item] + + @property + def event_source(self) -> Optional[str]: + """The AWS service from which the stream record originated. 
For DynamoDB Streams, this is aws:dynamodb.""" + return self.get("eventSource") + + @property + def event_source_arn(self) -> Optional[str]: + """The Amazon Resource Name (ARN) of the event source""" + return self.get("eventSourceARN") + + @property + def event_version(self) -> Optional[str]: + """The version number of the stream record format.""" + return self.get("eventVersion") + + @property + def user_identity(self) -> Optional[dict]: + """Contains details about the type of identity that made the request""" + return self.get("userIdentity") + + +class DynamoDBStreamEvent(DictWrapper): + """Dynamo DB Stream Event + + Documentation: + ------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html + """ + + @property + def records(self) -> Iterator[DynamoDBRecord]: + for record in self["Records"]: + yield DynamoDBRecord(record) diff --git a/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py b/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py new file mode 100644 index 00000000000..cb299309a69 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py @@ -0,0 +1,64 @@ +from typing import Any, Dict, List + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class EventBridgeEvent(DictWrapper): + """Amazon EventBridge Event + + Documentation: + -------------- + - https://docs.aws.amazon.com/eventbridge/latest/userguide/aws-events.html + """ + + @property + def get_id(self) -> str: + """A unique value is generated for every event. This can be helpful in tracing events as + they move through rules to targets, and are processed.""" + # Note: this name conflicts with existing python builtins + return self["id"] + + @property + def version(self) -> str: + """By default, this is set to 0 (zero) in all events.""" + return self["version"] + + @property + def account(self) -> str: + """The 12-digit number identifying an AWS account.""" + return self["account"] + + @property + def time(self) -> str: + """The event timestamp, which can be specified by the service originating the event. + + If the event spans a time interval, the service might choose to report the start time, so + this value can be noticeably before the time the event is actually received. + """ + return self["time"] + + @property + def region(self) -> str: + """Identifies the AWS region where the event originated.""" + return self["region"] + + @property + def resources(self) -> List[str]: + """This JSON array contains ARNs that identify resources that are involved in the event. + Inclusion of these ARNs is at the discretion of the service.""" + return self["resources"] + + @property + def source(self) -> str: + """Identifies the service that sourced the event. All events sourced from within AWS begin with "aws." """ + return self["source"] + + @property + def detail_type(self) -> str: + """Identifies, in combination with the source field, the fields and values that appear in the detail field.""" + return self["detail-type"] + + @property + def detail(self) -> Dict[str, Any]: + """A JSON object, whose content is at the discretion of the service originating the event. 
""" + return self["detail"] diff --git a/aws_lambda_powertools/utilities/data_classes/kinesis_stream_event.py b/aws_lambda_powertools/utilities/data_classes/kinesis_stream_event.py new file mode 100644 index 00000000000..6af1484f155 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/kinesis_stream_event.py @@ -0,0 +1,96 @@ +import base64 +import json +from typing import Iterator + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class KinesisStreamRecordPayload(DictWrapper): + @property + def approximate_arrival_timestamp(self) -> float: + """The approximate time that the record was inserted into the stream""" + return float(self["kinesis"]["approximateArrivalTimestamp"]) + + @property + def data(self) -> str: + """The data blob""" + return self["kinesis"]["data"] + + @property + def kinesis_schema_version(self) -> str: + """Schema version for the record""" + return self["kinesis"]["kinesisSchemaVersion"] + + @property + def partition_key(self) -> str: + """Identifies which shard in the stream the data record is assigned to""" + return self["kinesis"]["partitionKey"] + + @property + def sequence_number(self) -> str: + """The unique identifier of the record within its shard""" + return self["kinesis"]["sequenceNumber"] + + def data_as_text(self) -> str: + """Decode binary encoded data as text""" + return base64.b64decode(self.data).decode("utf-8") + + def data_as_json(self) -> dict: + """Decode binary encoded data as json""" + return json.loads(self.data_as_text()) + + +class KinesisStreamRecord(DictWrapper): + @property + def aws_region(self) -> str: + """AWS region where the event originated eg: us-east-1""" + return self["awsRegion"] + + @property + def event_id(self) -> str: + """A globally unique identifier for the event that was recorded in this stream record.""" + return self["eventID"] + + @property + def event_name(self) -> str: + """Event type eg: aws:kinesis:record""" + return self["eventName"] + + @property + def event_source(self) -> str: + """The AWS service from which the Kinesis event originated. 
For Kinesis, this is aws:kinesis""" + return self["eventSource"] + + @property + def event_source_arn(self) -> str: + """The Amazon Resource Name (ARN) of the event source""" + return self["eventSourceARN"] + + @property + def event_version(self) -> str: + """The eventVersion key value contains a major and minor version in the form ..""" + return self["eventVersion"] + + @property + def invoke_identity_arn(self) -> str: + """The ARN for the identity used to invoke the Lambda Function""" + return self["invokeIdentityArn"] + + @property + def kinesis(self) -> KinesisStreamRecordPayload: + """Underlying Kinesis record associated with the event""" + return KinesisStreamRecordPayload(self._data) + + +class KinesisStreamEvent(DictWrapper): + """Kinesis stream event + + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html + """ + + @property + def records(self) -> Iterator[KinesisStreamRecord]: + for record in self["Records"]: + yield KinesisStreamRecord(record) diff --git a/aws_lambda_powertools/utilities/data_classes/s3_event.py b/aws_lambda_powertools/utilities/data_classes/s3_event.py new file mode 100644 index 00000000000..2670142d575 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/s3_event.py @@ -0,0 +1,189 @@ +from typing import Dict, Iterator, Optional +from urllib.parse import unquote_plus + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class S3Identity(DictWrapper): + @property + def principal_id(self) -> str: + return self["principalId"] + + +class S3RequestParameters(DictWrapper): + @property + def source_ip_address(self) -> str: + return self["requestParameters"]["sourceIPAddress"] + + +class S3Bucket(DictWrapper): + @property + def name(self) -> str: + return self["s3"]["bucket"]["name"] + + @property + def owner_identity(self) -> S3Identity: + return S3Identity(self["s3"]["bucket"]["ownerIdentity"]) + + @property + def arn(self) -> str: + return self["s3"]["bucket"]["arn"] + + +class S3Object(DictWrapper): + @property + def key(self) -> str: + """Object key""" + return self["s3"]["object"]["key"] + + @property + def size(self) -> int: + """Object byte size""" + return int(self["s3"]["object"]["size"]) + + @property + def etag(self) -> str: + """object eTag""" + return self["s3"]["object"]["eTag"] + + @property + def version_id(self) -> Optional[str]: + """Object version if bucket is versioning-enabled, otherwise null""" + return self["s3"]["object"].get("versionId") + + @property + def sequencer(self) -> str: + """A string representation of a hexadecimal value used to determine event sequence, + only used with PUTs and DELETEs + """ + return self["s3"]["object"]["sequencer"] + + +class S3Message(DictWrapper): + @property + def s3_schema_version(self) -> str: + return self["s3"]["s3SchemaVersion"] + + @property + def configuration_id(self) -> str: + """ID found in the bucket notification configuration""" + return self["s3"]["configurationId"] + + @property + def bucket(self) -> S3Bucket: + return S3Bucket(self._data) + + @property + def get_object(self) -> S3Object: + """Get the `object` property as an S3Object""" + # Note: this name conflicts with existing python builtins + return S3Object(self._data) + + +class S3EventRecordGlacierRestoreEventData(DictWrapper): + @property + def lifecycle_restoration_expiry_time(self) -> str: + """Time when the object restoration will be expired.""" + return self["restoreEventData"]["lifecycleRestorationExpiryTime"] + + 
@property + def lifecycle_restore_storage_class(self) -> str: + """Source storage class for restore""" + return self["restoreEventData"]["lifecycleRestoreStorageClass"] + + +class S3EventRecordGlacierEventData(DictWrapper): + @property + def restore_event_data(self) -> S3EventRecordGlacierRestoreEventData: + """The restoreEventData key contains attributes related to your restore request. + + The glacierEventData key is only visible for s3:ObjectRestore:Completed events + """ + return S3EventRecordGlacierRestoreEventData(self._data) + + +class S3EventRecord(DictWrapper): + @property + def event_version(self) -> str: + """The eventVersion key value contains a major and minor version in the form ..""" + return self["eventVersion"] + + @property + def event_source(self) -> str: + """The AWS service from which the S3 event originated. For S3, this is aws:s3""" + return self["eventSource"] + + @property + def aws_region(self) -> str: + """aws region eg: us-east-1""" + return self["awsRegion"] + + @property + def event_time(self) -> str: + """The time, in ISO-8601 format, for example, 1970-01-01T00:00:00.000Z, when S3 finished + processing the request""" + return self["eventTime"] + + @property + def event_name(self) -> str: + """Event type""" + return self["eventName"] + + @property + def user_identity(self) -> S3Identity: + return S3Identity(self["userIdentity"]) + + @property + def request_parameters(self) -> S3RequestParameters: + return S3RequestParameters(self._data) + + @property + def response_elements(self) -> Dict[str, str]: + """The responseElements key value is useful if you want to trace a request by following up with AWS Support. + + Both x-amz-request-id and x-amz-id-2 help Amazon S3 trace an individual request. These values are the same + as those that Amazon S3 returns in the response to the request that initiates the events, so they can be + used to match the event to the request. 
+ """ + return self["responseElements"] + + @property + def s3(self) -> S3Message: + return S3Message(self._data) + + @property + def glacier_event_data(self) -> Optional[S3EventRecordGlacierEventData]: + """The glacierEventData key is only visible for s3:ObjectRestore:Completed events.""" + item = self.get("glacierEventData") + return None if item is None else S3EventRecordGlacierEventData(item) + + +class S3Event(DictWrapper): + """S3 event notification + + Documentation: + ------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-s3.html + - https://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html + - https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html + """ + + @property + def records(self) -> Iterator[S3EventRecord]: + for record in self["Records"]: + yield S3EventRecord(record) + + @property + def record(self) -> S3EventRecord: + """Get the first s3 event record""" + return next(self.records) + + @property + def bucket_name(self) -> str: + """Get the bucket name for the first s3 event record""" + return self["Records"][0]["s3"]["bucket"]["name"] + + @property + def object_key(self) -> str: + """Get the object key for the first s3 event record and unquote plus""" + return unquote_plus(self["Records"][0]["s3"]["object"]["key"]) diff --git a/aws_lambda_powertools/utilities/data_classes/ses_event.py b/aws_lambda_powertools/utilities/data_classes/ses_event.py new file mode 100644 index 00000000000..518981618dc --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/ses_event.py @@ -0,0 +1,221 @@ +from typing import Iterator, List + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class SESMailHeader(DictWrapper): + @property + def name(self) -> str: + return self["name"] + + @property + def value(self) -> str: + return self["value"] + + +class SESMailCommonHeaders(DictWrapper): + @property + def return_path(self) -> str: + """The values in the Return-Path header of the email.""" + return self["returnPath"] + + @property + def get_from(self) -> List[str]: + """The values in the From header of the email.""" + # Note: this name conflicts with existing python builtins + return self["from"] + + @property + def date(self) -> List[str]: + """The date and time when Amazon SES received the message.""" + return self["date"] + + @property + def to(self) -> List[str]: + """The values in the To header of the email.""" + return self["to"] + + @property + def message_id(self) -> str: + """The ID of the original message.""" + return str(self["messageId"]) + + @property + def subject(self) -> str: + """The value of the Subject header for the email.""" + return str(self["subject"]) + + +class SESMail(DictWrapper): + @property + def timestamp(self) -> str: + """String that contains the time at which the email was received, in ISO8601 format.""" + return self["timestamp"] + + @property + def source(self) -> str: + """String that contains the email address (specifically, the envelope MAIL FROM address) + that the email was sent from.""" + return self["source"] + + @property + def message_id(self) -> str: + """String that contains the unique ID assigned to the email by Amazon SES. 
+ + If the email was delivered to Amazon S3, the message ID is also the Amazon S3 object key that was + used to write the message to your Amazon S3 bucket.""" + return self["messageId"] + + @property + def destination(self) -> List[str]: + """A complete list of all recipient addresses (including To: and CC: recipients) + from the MIME headers of the incoming email.""" + return self["destination"] + + @property + def headers_truncated(self) -> bool: + """String that specifies whether the headers were truncated in the notification, which will happen + if the headers are larger than 10 KB. Possible values are true and false.""" + return bool(self["headersTruncated"]) + + @property + def headers(self) -> Iterator[SESMailHeader]: + """A list of Amazon SES headers and your custom headers. + Each header in the list has a name field and a value field""" + for header in self["headers"]: + yield SESMailHeader(header) + + @property + def common_headers(self) -> SESMailCommonHeaders: + """A list of headers common to all emails. Each header in the list is composed of a name and a value.""" + return SESMailCommonHeaders(self["commonHeaders"]) + + +class SESReceiptStatus(DictWrapper): + @property + def status(self) -> str: + return str(self["status"]) + + +class SESReceiptAction(DictWrapper): + @property + def get_type(self) -> str: + """String that indicates the type of action that was executed. + + Possible values are S3, SNS, Bounce, Lambda, Stop, and WorkMail + """ + # Note: this name conflicts with existing python builtins + return self["type"] + + @property + def function_arn(self) -> str: + """String that contains the ARN of the Lambda function that was triggered. + Present only for the Lambda action type.""" + return self["functionArn"] + + @property + def invocation_type(self) -> str: + """String that contains the invocation type of the Lambda function. Possible values are RequestResponse + and Event. Present only for the Lambda action type.""" + return self["invocationType"] + + +class SESReceipt(DictWrapper): + @property + def timestamp(self) -> str: + """String that specifies the date and time at which the action was triggered, in ISO 8601 format.""" + return self["timestamp"] + + @property + def processing_time_millis(self) -> int: + """String that specifies the period, in milliseconds, from the time Amazon SES received the message + to the time it triggered the action.""" + return int(self["processingTimeMillis"]) + + @property + def recipients(self) -> List[str]: + """A list of recipients (specifically, the envelope RCPT TO addresses) that were matched by the + active receipt rule. 
The addresses listed here may differ from those listed by the destination + field in the mail object.""" + return self["recipients"] + + @property + def spam_verdict(self) -> SESReceiptStatus: + """Object that indicates whether the message is spam.""" + return SESReceiptStatus(self["spamVerdict"]) + + @property + def virus_verdict(self) -> SESReceiptStatus: + """Object that indicates whether the message contains a virus.""" + return SESReceiptStatus(self["virusVerdict"]) + + @property + def spf_verdict(self) -> SESReceiptStatus: + """Object that indicates whether the Sender Policy Framework (SPF) check passed.""" + return SESReceiptStatus(self["spfVerdict"]) + + @property + def dmarc_verdict(self) -> SESReceiptStatus: + """Object that indicates whether the Domain-based Message Authentication, + Reporting & Conformance (DMARC) check passed.""" + return SESReceiptStatus(self["dmarcVerdict"]) + + @property + def action(self) -> SESReceiptAction: + """Object that encapsulates information about the action that was executed.""" + return SESReceiptAction(self["action"]) + + +class SESMessage(DictWrapper): + @property + def mail(self) -> SESMail: + return SESMail(self["ses"]["mail"]) + + @property + def receipt(self) -> SESReceipt: + return SESReceipt(self["ses"]["receipt"]) + + +class SESEventRecord(DictWrapper): + @property + def event_source(self) -> str: + """The AWS service from which the SES event record originated. For SES, this is aws:ses""" + return self["eventSource"] + + @property + def event_version(self) -> str: + """The eventVersion key value contains a major and minor version in the form ..""" + return self["eventVersion"] + + @property + def ses(self) -> SESMessage: + return SESMessage(self._data) + + +class SESEvent(DictWrapper): + """Amazon SES to receive message event data_classes + + NOTE: There is a 30-second timeout on RequestResponse invocations. 
+ + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/services-ses.html + - https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-action-lambda.html + """ + + @property + def records(self) -> Iterator[SESEventRecord]: + for record in self["Records"]: + yield SESEventRecord(record) + + @property + def record(self) -> SESEventRecord: + return next(self.records) + + @property + def mail(self) -> SESMail: + return self.record.ses.mail + + @property + def receipt(self) -> SESReceipt: + return self.record.ses.receipt diff --git a/aws_lambda_powertools/utilities/data_classes/sns_event.py b/aws_lambda_powertools/utilities/data_classes/sns_event.py new file mode 100644 index 00000000000..e96b096fe6b --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/sns_event.py @@ -0,0 +1,123 @@ +from typing import Dict, Iterator + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class SNSMessageAttribute(DictWrapper): + @property + def get_type(self) -> str: + """The supported message attribute data types are String, String.Array, Number, and Binary.""" + # Note: this name conflicts with existing python builtins + return self["Type"] + + @property + def value(self) -> str: + """The user-specified message attribute value.""" + return self["Value"] + + +class SNSMessage(DictWrapper): + @property + def signature_version(self) -> str: + """Version of the Amazon SNS signature used.""" + return self["Sns"]["SignatureVersion"] + + @property + def timestamp(self) -> str: + """The time (GMT) when the subscription confirmation was sent.""" + return self["Sns"]["Timestamp"] + + @property + def signature(self) -> str: + """Base64-encoded "SHA1withRSA" signature of the Message, MessageId, Type, Timestamp, and TopicArn values.""" + return self["Sns"]["Signature"] + + @property + def signing_cert_url(self) -> str: + """The URL to the certificate that was used to sign the message.""" + return self["Sns"]["SigningCertUrl"] + + @property + def message_id(self) -> str: + """A Universally Unique Identifier, unique for each message published. + + For a message that Amazon SNS resends during a retry, the message ID of the original message is used.""" + return self["Sns"]["MessageId"] + + @property + def message(self) -> str: + """A string that describes the message. """ + return self["Sns"]["Message"] + + @property + def message_attributes(self) -> Dict[str, SNSMessageAttribute]: + return {k: SNSMessageAttribute(v) for (k, v) in self["Sns"]["MessageAttributes"].items()} + + @property + def get_type(self) -> str: + """The type of message. + + For a subscription confirmation, the type is SubscriptionConfirmation.""" + # Note: this name conflicts with existing python builtins + return self["Sns"]["Type"] + + @property + def unsubscribe_url(self) -> str: + """A URL that you can use to unsubscribe the endpoint from this topic. 
+ + If you visit this URL, Amazon SNS unsubscribes the endpoint and stops sending notifications to this endpoint.""" + return self["Sns"]["UnsubscribeUrl"] + + @property + def topic_arn(self) -> str: + """The Amazon Resource Name (ARN) for the topic that this endpoint is subscribed to.""" + return self["Sns"]["TopicArn"] + + @property + def subject(self) -> str: + """The Subject parameter specified when the notification was published to the topic.""" + return self["Sns"]["Subject"] + + +class SNSEventRecord(DictWrapper): + @property + def event_version(self) -> str: + """Event version""" + return self["EventVersion"] + + @property + def event_subscription_arn(self) -> str: + return self["EventSubscriptionArn"] + + @property + def event_source(self) -> str: + """The AWS service from which the SNS event record originated. For SNS, this is aws:sns""" + return self["EventSource"] + + @property + def sns(self) -> SNSMessage: + return SNSMessage(self._data) + + +class SNSEvent(DictWrapper): + """SNS Event + + Documentation: + ------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-sns.html + """ + + @property + def records(self) -> Iterator[SNSEventRecord]: + for record in self["Records"]: + yield SNSEventRecord(record) + + @property + def record(self) -> SNSEventRecord: + """Return the first SNS event record""" + return next(self.records) + + @property + def sns_message(self) -> str: + """Return the message for the first sns event record""" + return self.record.sns.message diff --git a/aws_lambda_powertools/utilities/data_classes/sqs_event.py b/aws_lambda_powertools/utilities/data_classes/sqs_event.py new file mode 100644 index 00000000000..778b8f56f36 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/sqs_event.py @@ -0,0 +1,148 @@ +from typing import Dict, Iterator, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class SQSRecordAttributes(DictWrapper): + @property + def aws_trace_header(self) -> Optional[str]: + """Returns the AWS X-Ray trace header string.""" + return self.get("AWSTraceHeader") + + @property + def approximate_receive_count(self) -> str: + """Returns the number of times a message has been received across all queues but not deleted.""" + return self["ApproximateReceiveCount"] + + @property + def sent_timestamp(self) -> str: + """Returns the time the message was sent to the queue (epoch time in milliseconds).""" + return self["SentTimestamp"] + + @property + def sender_id(self) -> str: + """For an IAM user, returns the IAM user ID, For an IAM role, returns the IAM role ID""" + return self["SenderId"] + + @property + def approximate_first_receive_timestamp(self) -> str: + """Returns the time the message was first received from the queue (epoch time in milliseconds).""" + return self["ApproximateFirstReceiveTimestamp"] + + @property + def sequence_number(self) -> Optional[str]: + """The large, non-consecutive number that Amazon SQS assigns to each message.""" + return self.get("SequenceNumber") + + @property + def message_group_id(self) -> Optional[str]: + """The tag that specifies that a message belongs to a specific message group. 
+ + Messages that belong to the same message group are always processed one by one, in a + strict order relative to the message group (however, messages that belong to different + message groups might be processed out of order).""" + return self.get("MessageGroupId") + + @property + def message_deduplication_id(self) -> Optional[str]: + """The token used for deduplication of sent messages. + + If a message with a particular message deduplication ID is sent successfully, any messages sent + with the same message deduplication ID are accepted successfully but aren't delivered during + the 5-minute deduplication interval.""" + return self.get("MessageDeduplicationId") + + +class SQSMessageAttribute(DictWrapper): + """The user-specified message attribute value.""" + + @property + def string_value(self) -> Optional[str]: + """Strings are Unicode with UTF-8 binary encoding.""" + return self["stringValue"] + + @property + def binary_value(self) -> Optional[str]: + """Binary type attributes can store any binary data, such as compressed data, encrypted data, or images. + + Base64-encoded binary data object""" + return self["binaryValue"] + + @property + def data_type(self) -> str: + """ The message attribute data type. Supported types include `String`, `Number`, and `Binary`.""" + return self["dataType"] + + +class SQSMessageAttributes(Dict[str, SQSMessageAttribute]): + def __getitem__(self, key: str) -> Optional[SQSMessageAttribute]: + item = super(SQSMessageAttributes, self).get(key) + return None if item is None else SQSMessageAttribute(item) + + +class SQSRecord(DictWrapper): + """An Amazon SQS message""" + + @property + def message_id(self) -> str: + """A unique identifier for the message. + + A messageId is considered unique across all AWS accounts for an extended period of time.""" + return self["messageId"] + + @property + def receipt_handle(self) -> str: + """An identifier associated with the act of receiving the message. + + A new receipt handle is returned every time you receive a message. When deleting a message, + you provide the last received receipt handle to delete the message.""" + return self["receiptHandle"] + + @property + def body(self) -> str: + """The message's contents (not URL-encoded).""" + return self["body"] + + @property + def attributes(self) -> SQSRecordAttributes: + """A map of the attributes requested in ReceiveMessage to their respective values.""" + return SQSRecordAttributes(self["attributes"]) + + @property + def message_attributes(self) -> SQSMessageAttributes: + """Each message attribute consists of a Name, Type, and Value.""" + return SQSMessageAttributes(self["messageAttributes"]) + + @property + def md5_of_body(self) -> str: + """An MD5 digest of the non-URL-encoded message body string.""" + return self["md5OfBody"] + + @property + def event_source(self) -> str: + """The AWS service from which the SQS record originated. 
For SQS, this is `aws:sqs` """ + return self["eventSource"] + + @property + def event_source_arn(self) -> str: + """The Amazon Resource Name (ARN) of the event source""" + return self["eventSourceARN"] + + @property + def aws_region(self) -> str: + """aws region eg: us-east-1""" + return self["awsRegion"] + + +class SQSEvent(DictWrapper): + """SQS Event + + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html + """ + + @property + def records(self) -> Iterator[SQSRecord]: + for record in self["Records"]: + yield SQSRecord(record) diff --git a/aws_lambda_powertools/utilities/validation/__init__.py b/aws_lambda_powertools/utilities/validation/__init__.py new file mode 100644 index 00000000000..94706e3214d --- /dev/null +++ b/aws_lambda_powertools/utilities/validation/__init__.py @@ -0,0 +1,14 @@ +""" +Simple validator to enforce incoming/outgoing event conforms with JSON Schema +""" + +from .exceptions import InvalidEnvelopeExpressionError, InvalidSchemaFormatError, SchemaValidationError +from .validator import validate, validator + +__all__ = [ + "validate", + "validator", + "InvalidSchemaFormatError", + "SchemaValidationError", + "InvalidEnvelopeExpressionError", +] diff --git a/aws_lambda_powertools/utilities/validation/base.py b/aws_lambda_powertools/utilities/validation/base.py new file mode 100644 index 00000000000..eab7f89064d --- /dev/null +++ b/aws_lambda_powertools/utilities/validation/base.py @@ -0,0 +1,65 @@ +import logging +from typing import Any, Dict + +import fastjsonschema +import jmespath +from jmespath.exceptions import LexerError + +from .exceptions import InvalidEnvelopeExpressionError, InvalidSchemaFormatError, SchemaValidationError +from .jmespath_functions import PowertoolsFunctions + +logger = logging.getLogger(__name__) + + +def validate_data_against_schema(data: Dict, schema: Dict): + """Validate dict data against given JSON Schema + + Parameters + ---------- + data : Dict + Data set to be validated + schema : Dict + JSON Schema to validate against + + Raises + ------ + SchemaValidationError + When schema validation fails against data set + InvalidSchemaFormatError + When JSON schema provided is invalid + """ + try: + fastjsonschema.validate(definition=schema, data=data) + except fastjsonschema.JsonSchemaException as e: + message = f"Failed schema validation. Error: {e.message}, Path: {e.path}, Data: {e.value}" # noqa: B306, E501 + raise SchemaValidationError(message) + except (TypeError, AttributeError) as e: + raise InvalidSchemaFormatError(f"Schema received: {schema}. Error: {e}") + + +def unwrap_event_from_envelope(data: Dict, envelope: str, jmespath_options: Dict) -> Any: + """Searches data using JMESPath expression + + Parameters + ---------- + data : Dict + Data set to be filtered + envelope : str + JMESPath expression to filter data against + jmespath_options : Dict + Alternative JMESPath options to be included when filtering expr + + Returns + ------- + Any + Data found using JMESPath expression given in envelope + """ + if not jmespath_options: + jmespath_options = {"custom_functions": PowertoolsFunctions()} + + try: + logger.debug(f"Envelope detected: {envelope}. JMESPath options: {jmespath_options}") + return jmespath.search(envelope, data, options=jmespath.Options(**jmespath_options)) + except (LexerError, TypeError, UnicodeError) as e: + message = f"Failed to unwrap event from envelope using expression. 
Error: {e} Exp: {envelope}, Data: {data}" # noqa: B306, E501 + raise InvalidEnvelopeExpressionError(message) diff --git a/aws_lambda_powertools/utilities/validation/envelopes.py b/aws_lambda_powertools/utilities/validation/envelopes.py new file mode 100644 index 00000000000..7bc84fce614 --- /dev/null +++ b/aws_lambda_powertools/utilities/validation/envelopes.py @@ -0,0 +1,10 @@ +"""Built-in envelopes""" + +API_GATEWAY_REST = "powertools_json(body)" +API_GATEWAY_HTTP = API_GATEWAY_REST +SQS = "Records[*].powertools_json(body)" +SNS = "Records[0].Sns.Message | powertools_json(@)" +EVENTBRIDGE = "detail" +CLOUDWATCH_EVENTS_SCHEDULED = EVENTBRIDGE +KINESIS_DATA_STREAM = "Records[*].kinesis.powertools_json(powertools_base64(data))" +CLOUDWATCH_LOGS = "awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]" diff --git a/aws_lambda_powertools/utilities/validation/exceptions.py b/aws_lambda_powertools/utilities/validation/exceptions.py new file mode 100644 index 00000000000..6b51fe5ca28 --- /dev/null +++ b/aws_lambda_powertools/utilities/validation/exceptions.py @@ -0,0 +1,14 @@ +class SchemaValidationError(Exception): + """When serialization fail schema validation""" + + pass + + +class InvalidSchemaFormatError(Exception): + """When JSON Schema is in invalid format""" + + pass + + +class InvalidEnvelopeExpressionError(Exception): + """When JMESPath fails to parse expression""" diff --git a/aws_lambda_powertools/utilities/validation/jmespath_functions.py b/aws_lambda_powertools/utilities/validation/jmespath_functions.py new file mode 100644 index 00000000000..b23ab477d6b --- /dev/null +++ b/aws_lambda_powertools/utilities/validation/jmespath_functions.py @@ -0,0 +1,22 @@ +import base64 +import gzip +import json + +import jmespath + + +class PowertoolsFunctions(jmespath.functions.Functions): + @jmespath.functions.signature({"types": ["string"]}) + def _func_powertools_json(self, value): + return json.loads(value) + + @jmespath.functions.signature({"types": ["string"]}) + def _func_powertools_base64(self, value): + return base64.b64decode(value).decode() + + @jmespath.functions.signature({"types": ["string"]}) + def _func_powertools_base64_gzip(self, value): + encoded = base64.b64decode(value) + uncompressed = gzip.decompress(encoded) + + return uncompressed.decode() diff --git a/aws_lambda_powertools/utilities/validation/validator.py b/aws_lambda_powertools/utilities/validation/validator.py new file mode 100644 index 00000000000..c404e90f55a --- /dev/null +++ b/aws_lambda_powertools/utilities/validation/validator.py @@ -0,0 +1,204 @@ +import logging +from typing import Any, Callable, Dict, Union + +from ...middleware_factory import lambda_handler_decorator +from .base import unwrap_event_from_envelope, validate_data_against_schema + +logger = logging.getLogger(__name__) + + +@lambda_handler_decorator +def validator( + handler: Callable, + event: Union[Dict, str], + context: Any, + inbound_schema: Dict = None, + outbound_schema: Dict = None, + envelope: str = None, + jmespath_options: Dict = None, +) -> Any: + """Lambda handler decorator to validate incoming/outbound data using a JSON Schema + + Example + ------- + + **Validate incoming event** + + from aws_lambda_powertools.utilities.validation import validator + + @validator(inbound_schema=json_schema_dict) + def handler(event, context): + return event + + **Validate incoming and outgoing event** + + from aws_lambda_powertools.utilities.validation import validator + + @validator(inbound_schema=json_schema_dict, 
outbound_schema=response_json_schema_dict) + def handler(event, context): + return event + + **Unwrap event before validating against actual payload - using built-in envelopes** + + from aws_lambda_powertools.utilities.validation import validator, envelopes + + @validator(inbound_schema=json_schema_dict, envelope=envelopes.API_GATEWAY_REST) + def handler(event, context): + return event + + **Unwrap event before validating against actual payload - using custom JMESPath expression** + + from aws_lambda_powertools.utilities.validation import validator + + @validator(inbound_schema=json_schema_dict, envelope="payload[*].my_data") + def handler(event, context): + return event + + **Unwrap and deserialize JSON string event before validating against actual payload - using built-in functions** + + from aws_lambda_powertools.utilities.validation import validator + + @validator(inbound_schema=json_schema_dict, envelope="Records[*].powertools_json(body)") + def handler(event, context): + return event + + **Unwrap, decode base64 and deserialize JSON string event before validating against actual payload - using built-in functions** # noqa: E501 + + from aws_lambda_powertools.utilities.validation import validator + + @validator(inbound_schema=json_schema_dict, envelope="Records[*].kinesis.powertools_json(powertools_base64(data))") + def handler(event, context): + return event + + **Unwrap, decompress ZIP archive and deserialize JSON string event before validating against actual payload - using built-in functions** # noqa: E501 + + from aws_lambda_powertools.utilities.validation import validator + + @validator(inbound_schema=json_schema_dict, envelope="awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]") + def handler(event, context): + return event + + Parameters + ---------- + handler : Callable + Method to annotate on + event : Dict + Lambda event to be validated + context : Any + Lambda context object + inbound_schema : Dict + JSON Schema to validate incoming event + outbound_schema : Dict + JSON Schema to validate outbound event + envelope : Dict + JMESPath expression to filter data against + jmespath_options : Dict + Alternative JMESPath options to be included when filtering expr + + Returns + ------- + Any + Lambda handler response + + Raises + ------ + SchemaValidationError + When schema validation fails against data set + InvalidSchemaFormatError + When JSON schema provided is invalid + InvalidEnvelopeExpressionError + When JMESPath expression to unwrap event is invalid + """ + if envelope: + event = unwrap_event_from_envelope(data=event, envelope=envelope, jmespath_options=jmespath_options) + + if inbound_schema: + logger.debug("Validating inbound event") + validate_data_against_schema(data=event, schema=inbound_schema) + + response = handler(event, context) + + if outbound_schema: + logger.debug("Validating outbound event") + validate_data_against_schema(data=response, schema=outbound_schema) + + return response + + +def validate(event: Dict, schema: Dict = None, envelope: str = None, jmespath_options: Dict = None): + """Standalone function to validate event data using a JSON Schema + + Typically used when you need more control over the validation process. 
+ + **Validate event** + + from aws_lambda_powertools.utilities.validation import validate + + def handler(event, context): + validate(event=event, schema=json_schema_dict) + return event + + **Unwrap event before validating against actual payload - using built-in envelopes** + + from aws_lambda_powertools.utilities.validation import validate, envelopes + + def handler(event, context): + validate(event=event, schema=json_schema_dict, envelope=envelopes.API_GATEWAY_REST) + return event + + **Unwrap event before validating against actual payload - using custom JMESPath expression** + + from aws_lambda_powertools.utilities.validation import validate + + def handler(event, context): + validate(event=event, schema=json_schema_dict, envelope="payload[*].my_data") + return event + + **Unwrap and deserialize JSON string event before validating against actual payload - using built-in functions** + + from aws_lambda_powertools.utilities.validation import validate + + def handler(event, context): + validate(event=event, schema=json_schema_dict, envelope="Records[*].powertools_json(body)") + return event + + **Unwrap, decode base64 and deserialize JSON string event before validating against actual payload - using built-in functions** + + from aws_lambda_powertools.utilities.validation import validate + + def handler(event, context): + validate(event=event, schema=json_schema_dict, envelope="Records[*].kinesis.powertools_json(powertools_base64(data))") + return event + + **Unwrap, decompress ZIP archive and deserialize JSON string event before validating against actual payload - using built-in functions** # noqa: E501 + + from aws_lambda_powertools.utilities.validation import validate + + def handler(event, context): + validate(event=event, schema=json_schema_dict, envelope="awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]") + return event + + Parameters + ---------- + event : Dict + Lambda event to be validated + schema : Dict + JSON Schema to validate incoming event + envelope : Dict + JMESPath expression to filter data against + jmespath_options : Dict + Alternative JMESPath options to be included when filtering expr + + Raises + ------ + SchemaValidationError + When schema validation fails against data set + InvalidSchemaFormatError + When JSON schema provided is invalid + InvalidEnvelopeExpressionError + When JMESPath expression to unwrap event is invalid + """ + if envelope: + event = unwrap_event_from_envelope(data=event, envelope=envelope, jmespath_options=jmespath_options) + + validate_data_against_schema(data=event, schema=schema) diff --git a/docs/content/core/logger.mdx b/docs/content/core/logger.mdx index 0f8bb7fa9b9..977ab58599e 100644 --- a/docs/content/core/logger.mdx +++ b/docs/content/core/logger.mdx @@ -222,21 +222,24 @@ If you ever forget to use `child` param, we will return an existing `Logger` wit ## Sampling debug logs -You can dynamically set a percentage of your logs to **DEBUG** level using `sample_rate` param or via env var `POWERTOOLS_LOGGER_SAMPLE_RATE`. +Sampling allows you to set your Logger Log Level as DEBUG based on a percentage of your concurrent/cold start invocations. You can set a sampling value of `0.0` to `1` (100%) using either `sample_rate` parameter or `POWERTOOLS_LOGGER_SAMPLE_RATE` env var. -Sampling calculation happens at the Logger class initialization. 
This means, when configured it, sampling it's more likely to happen during concurrent requests, or infrequent invocations as [new Lambda execution contexts are created](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), not reused. +This is useful when you want to troubleshoot an issue, say a sudden increase in concurrency, and you might not have enough information in your logs as Logger log level was understandably set as INFO. + +Sampling decision happens at the Logger class initialization, which only happens during a cold start. This means sampling may happen significantly more or less than you expect if you have a steady low number of invocations and thus few cold starts. - If you want this logic to happen on every invocation regardless whether Lambda reuses the execution environment or not, then create your Logger inside your Lambda handler. + If you want Logger to calculate sampling on every invocation, then please open a feature request.
```python:title=collect.py from aws_lambda_powertools import Logger # Sample 10% of debug logs e.g. 0.1 -logger = Logger(sample_rate=0.1) # highlight-line +logger = Logger(sample_rate=0.1, level="INFO") # highlight-line def handler(event, context): + logger.debug("Verifying whether order_id is present") if "order_id" in event: logger.info("Collecting payment") ... @@ -245,7 +248,21 @@ def handler(event, context):
Excerpt output in CloudWatch Logs -```json:title=cloudwatch_logs.json +```json:title=sampled_log_request_as_debug.json +{ + "timestamp": "2020-05-24 18:17:33,774", + "level": "DEBUG", // highlight-line + "location": "collect.handler:1", + "service": "payment", + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", + "cold_start": true, + "sampling_rate": 0.1, // highlight-line + "message": "Verifying whether order_id is present" +} + { "timestamp": "2020-05-24 18:17:33,774", "level": "INFO", @@ -260,6 +277,7 @@ def handler(event, context): "message": "Collecting payment" } ``` +
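
For intuition, the per-cold-start sampling decision described above is conceptually equivalent to the following sketch (illustrative only, not the library's exact code; `SAMPLE_RATE` stands in for the `sample_rate` parameter):

```python
import logging
import random

SAMPLE_RATE = 0.1  # stand-in for Logger(sample_rate=0.1)

# Decided once at Logger initialization, i.e. once per cold start
log_level = logging.INFO
if random.random() <= SAMPLE_RATE:
    log_level = logging.DEBUG  # roughly 10% of cold starts emit DEBUG logs
```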
@@ -305,7 +323,7 @@ This can be fixed by either ensuring both has the `service` value as `payment`,

You might want to continue to use the same date formatting style, or override `location` to display the `package.function_name:line_number` as you previously had.

-Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, and `datefmt`
+Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`, and `datefmt`

```python
from aws_lambda_powertools import Logger
@@ -317,7 +335,7 @@ logger = Logger(stream=stdout, location="[%(funcName)s] %(module)s", datefmt="fa
logger = Logger(stream=stdout, location=None) # highlight-line
```

-Alternatively, you can also change the order of the following log record keys via the `log_record_order` parameter: `level`, `location`, `message`, and `timestamp`
+Alternatively, you can also change the order of the following log record keys via the `log_record_order` parameter: `level`, `location`, `message`, `xray_trace_id`, and `timestamp`

```python
from aws_lambda_powertools import Logger
@@ -358,3 +376,27 @@ except Exception:
}
```
+
+
+## Testing your code
+
+When unit testing code that uses the `inject_lambda_context` decorator, you need to pass a dummy Lambda Context, or else Logger will fail.
+
+This is a Pytest sample that provides the minimum information necessary for Logger to succeed:
+
+```python:title=fake_lambda_context_for_logger.py
+from collections import namedtuple
+
+import pytest
+
+@pytest.fixture
+def lambda_context():
+    lambda_context = {
+        "function_name": "test",
+        "memory_limit_in_mb": 128,
+        "invoked_function_arn": "arn:aws:lambda:eu-west-1:809313241:function:test",
+        "aws_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
+    }
+
+    return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values())
+
+def test_lambda_handler(lambda_handler, lambda_context):
+    # lambda_handler is your handler under test, e.g. provided by another fixture
+    test_event = {'test': 'event'}
+    lambda_handler(test_event, lambda_context) # this will now have a Context object populated
+```
diff --git a/docs/content/core/metrics.mdx b/docs/content/core/metrics.mdx
index be1b9feaa5b..9c341dff1bb 100644
--- a/docs/content/core/metrics.mdx
+++ b/docs/content/core/metrics.mdx
@@ -251,11 +251,37 @@ This has the advantage of keeping cold start metric separate from your applicati

## Testing your code

+### Environment variables
+
Use `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` env vars when unit testing your code to ensure metric namespace and dimension objects are created, and your code doesn't fail validation.

```bash:title=pytest_metric_namespace.sh
-
POWERTOOLS_SERVICE_NAME="Example" POWERTOOLS_METRICS_NAMESPACE="Application" python -m pytest
```

-You can ignore this if you are explicitly setting namespace/default dimension by passing the `namespace` and `service` parameters when initializing Metrics: `metrics = Metrics(namespace=ApplicationName, service=ServiceName)`.
+If you prefer setting an environment variable for specific tests, and are using Pytest, you can use the [monkeypatch](https://docs.pytest.org/en/latest/monkeypatch.html) fixture:
+
+```python:title=pytest_env_var.py
+def test_namespace_env_var(monkeypatch):
+    # Set POWERTOOLS_METRICS_NAMESPACE before initializing Metrics
+    monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", "Application")
+
+    metrics = Metrics()
+    ...
+``` + +> Ignore this, if you are explicitly setting namespace/default dimension via `namespace` and `service` parameters: `metrics = Metrics(namespace=ApplicationName, service=ServiceName)` + +### Clearing metrics + +`Metrics` keep metrics in memory across multiple instances. If you need to test this behaviour, you can use the following Pytest fixture to ensure metrics are reset incl. cold start: + +```python:title=pytest_metrics_reset_fixture.py +@pytest.fixture(scope="function", autouse=True) +def reset_metric_set(): + # Clear out every metric data prior to every test + metrics = Metrics() + metrics.clear_metrics() + metrics_global.is_cold_start = True # ensure each test has cold start + yield +``` diff --git a/docs/content/index.mdx b/docs/content/index.mdx index ec2dd862e38..0c46af2958a 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -5,7 +5,7 @@ description: AWS Lambda Powertools Python import Note from "../src/components/Note" -Powertools is a suite of utilities for AWS Lambda functions that makes tracing with AWS X-Ray, structured logging and creating custom metrics asynchronously easier. +A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. Looking for a quick run through of the core utilities?

@@ -24,12 +24,6 @@ Powertools is available in PyPi. You can use your favourite dependency managemen ```bash:title=hello_world.sh sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python ``` -* [Tracing](./core/tracer) - Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions -* [Logging](./core/logger) - Structured logging made easier, and decorator to enrich structured logging with key Lambda context details -* [Metrics](./core/metrics) - Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) -* [Bring your own middleware](./utilities/middleware_factory) - Decorator factory to create your own middleware to run logic before, and after each Lambda invocation -* [Parameters utility](./utilities/parameters) - Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time -* [Batch utility](./utilities/batch) - Batch processing for AWS SQS, handles partial failure. ### Lambda Layer @@ -59,9 +53,10 @@ This will add a nested app stack with an output parameter `LayerVersionArn`, tha You can fetch the available versions via the API with: - ```bash - aws serverlessrepo list-application-versions --application-id arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - ``` +```bash +aws serverlessrepo list-application-versions \ + --application-id arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer +``` ## Features @@ -71,8 +66,12 @@ Utility | Description [Logging](./core/logger) | Structured logging made easier, and decorator to enrich structured logging with key Lambda context details [Metrics](./core/metrics) | Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) [Bring your own middleware](.//utilities/middleware_factory) | Decorator factory to create your own middleware to run logic before, and after each Lambda invocation -[Parameters utility](./utilities/parameters) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time -[Typing utility](./utilities/typing) | Static typing classes to speedup development in your IDE +[Parameters](./utilities/parameters) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time +[Typing](./utilities/typing) | Static typing classes to speedup development in your IDE +[Batch](./utilities/batch) | Handle partial failures for AWS SQS batch processing +[Validation](./utilities/validation) | JSON Schema validator for inbound events and responses +[Event source data classes](./utilities/data_classes) | Data classes describing the schema of common Lambda event triggers + ## Environment variables diff --git a/docs/content/media/utilities_data_classes.png b/docs/content/media/utilities_data_classes.png new file mode 100644 index 00000000000..94ed83bde97 Binary files /dev/null and b/docs/content/media/utilities_data_classes.png differ diff --git a/docs/content/utilities/data_classes.mdx b/docs/content/utilities/data_classes.mdx new file mode 100644 index 00000000000..22c6f4ec856 --- /dev/null +++ b/docs/content/utilities/data_classes.mdx @@ -0,0 +1,240 @@ +--- +title: Event Source Data Classes +description: Utility +--- + +import Note from 
"../../src/components/Note" + +The event source data classes utility provides classes describing the schema of common Lambda events triggers. + +**Key Features** + +* Type hinting and code completion for common event types +* Helper functions for decoding/deserializing nested fields +* Docstrings for fields contained in event schemas + +**Background** + +When authoring Lambda functions, you often need to understand the schema of the event dictionary which is passed to the +handler. There are several common event types which follow a specific schema, depending on the service triggering the +Lambda function. + + +## Utilizing the data classes + +The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class. +For example, if your Lambda function is being triggered by an API Gateway proxy integration, you can use the +`APIGatewayProxyEvent` class. + +![Utilities Data Classes](../media/utilities_data_classes.png) + + +## Supported event sources + +Event Source | Data_class +------------------------------------------------- | --------------------------------------------------------------------------------- +[API Gateway Proxy](#api-gateway-proxy) | `APIGatewayProxyEvent` +[API Gateway Proxy event v2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` +[CloudWatch Logs](#cloudWatch-logs) | `CloudWatchLogsEvent` +[Cognito User Pool](#cognito-user-pool-triggers) | Multiple available under `cognito_user_pool_event` +[DynamoDB streams](#dynamoDB-streams) | `DynamoDBStreamEvent`, `DynamoDBRecordEventName` +[EventBridge](#eventbridge) | `EventBridgeEvent` +[Kinesis Data Stream](#kinesis-streams) | `KinesisStreamEvent` +[S3](#S3) | `S3Event` +[SES](#SES) | `SESEvent` +[SNS](#SNS) | `SNSEvent` +[SQS](#SQS) | `SQSEvent` + + + + The examples provided below are far from exhaustive - the data classes themselves are designed to provide a form of + documentation inherently (via autocompletion, types and docstrings). + + + +## API Gateway Proxy + +Typically used for API Gateway REST API or HTTP API using v1 proxy event. + +```python:title=lambda_app.py +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent + +def lambda_handler(event, context): + event: APIGatewayProxyEvent = APIGatewayProxyEvent(event) + request_context = event.request_context + identity = request_context.identity + + if 'helloworld' in event.path && event.http_method == 'GET': + user = identity.user + do_something_with(event.body, user) +``` + +## API Gateway Proxy v2 + +```python:title=lambda_app.py +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2 + +def lambda_handler(event, context): + event: APIGatewayProxyEventV2 = APIGatewayProxyEventV2(event) + request_context = event.request_context + query_string_parameters = event.query_string_parameters + + if 'helloworld' in event.raw_path && request_context.http.method == 'POST': + do_something_with(event.body, query_string_parameters) +``` + +## CloudWatch Logs + +CloudWatch Logs events by default are compressed and base64 encoded. You can use the helper function provided to decode, +decompress and parse json data from the event. 
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import CloudWatchLogsEvent
+
+def lambda_handler(event, context):
+    event: CloudWatchLogsEvent = CloudWatchLogsEvent(event)
+
+    decompressed_log = event.parse_logs_data
+    log_events = decompressed_log.log_events
+    for log_event in log_events:  # renamed to avoid shadowing the outer `event`
+        do_something_with(log_event.timestamp, log_event.message)
+```
+
+## Cognito User Pool
+
+Cognito User Pools have several [different Lambda trigger sources](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html#cognito-user-identity-pools-working-with-aws-lambda-trigger-sources), all of which map to a different data class, which
+can be imported from `aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event`:
+
+Trigger/Event Source | Data Class
+------------------------------------------------- | -------------------------------------------------
+Custom message event | `data_classes.cognito_user_pool_event.CustomMessageTriggerEvent`
+Post authentication | `data_classes.cognito_user_pool_event.PostAuthenticationTriggerEvent`
+Post confirmation | `data_classes.cognito_user_pool_event.PostConfirmationTriggerEvent`
+Pre authentication | `data_classes.cognito_user_pool_event.PreAuthenticationTriggerEvent`
+Pre sign-up | `data_classes.cognito_user_pool_event.PreSignUpTriggerEvent`
+Pre token generation | `data_classes.cognito_user_pool_event.PreTokenGenerationTriggerEvent`
+User migration | `data_classes.cognito_user_pool_event.UserMigrationTriggerEvent`
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent
+
+def lambda_handler(event, context):
+    event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event)
+
+    user_attributes = event.request.user_attributes
+    do_something_with(user_attributes)
+```
+
+## DynamoDB Streams
+
+The DynamoDB data class utility provides the base class for `DynamoDBStreamEvent`, a typed class for
+attribute values (`AttributeValue`), as well as enums for stream view type (`StreamViewType`) and event type
+(`DynamoDBRecordEventName`).
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import DynamoDBStreamEvent, DynamoDBRecordEventName
+
+def lambda_handler(event, context):
+    event: DynamoDBStreamEvent = DynamoDBStreamEvent(event)
+
+    # Multiple records can be delivered in a single event
+    for record in event.records:
+        if record.event_name == DynamoDBRecordEventName.MODIFY:
+            do_something_with(record.dynamodb.new_image)
+            do_something_with(record.dynamodb.old_image)
+```
+
+## EventBridge
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent
+
+def lambda_handler(event, context):
+    event: EventBridgeEvent = EventBridgeEvent(event)
+    do_something_with(event.detail)
+```
+
+## Kinesis streams
+
+Kinesis events by default contain base64 encoded data. You can use the helper functions to access the data either as json
+or plain text, depending on the original payload.
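
As the `KinesisStreamRecordPayload` helpers earlier in this diff show, the decoding is plain base64/JSON handling; decoding a raw record by hand would look roughly like this sketch:

```python
import base64
import json

def decode_kinesis_data(record: dict) -> str:
    # Kinesis delivers the data blob base64 encoded
    return base64.b64decode(record["kinesis"]["data"]).decode("utf-8")

def decode_kinesis_json(record: dict) -> dict:
    # Only valid if the producer wrote JSON into the stream
    return json.loads(decode_kinesis_data(record))
```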
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import KinesisStreamEvent
+
+def lambda_handler(event, context):
+    event: KinesisStreamEvent = KinesisStreamEvent(event)
+
+    # Multiple records can be delivered in a single event
+    for record in event.records:
+        # if data was delivered as plain text
+        data = record.kinesis.data_as_text()
+
+        # if data was delivered as json
+        data = record.kinesis.data_as_json()
+
+        do_something_with(data)
+```
+
+## S3
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import S3Event
+
+def lambda_handler(event, context):
+    event: S3Event = S3Event(event)
+    bucket_name = event.bucket_name
+
+    # Multiple records can be delivered in a single event
+    for record in event.records:
+        object_key = record.s3.get_object.key
+
+        do_something_with(f'{bucket_name}/{object_key}')
+```
+
+## SES
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import SESEvent
+
+def lambda_handler(event, context):
+    event: SESEvent = SESEvent(event)
+
+    # Multiple records can be delivered in a single event
+    for record in event.records:
+        mail = record.ses.mail
+        common_headers = mail.common_headers
+
+        do_something_with(common_headers.to, common_headers.subject)
+```
+
+## SNS
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import SNSEvent
+
+def lambda_handler(event, context):
+    event: SNSEvent = SNSEvent(event)
+
+    # Multiple records can be delivered in a single event
+    for record in event.records:
+        message = record.sns.message
+        subject = record.sns.subject
+
+        do_something_with(subject, message)
+```
+
+## SQS
+
+```python:title=lambda_app.py
+from aws_lambda_powertools.utilities.data_classes import SQSEvent
+
+def lambda_handler(event, context):
+    event: SQSEvent = SQSEvent(event)
+
+    # Multiple records can be delivered in a single event
+    for record in event.records:
+        do_something_with(record.body)
+```
diff --git a/docs/content/utilities/validation.mdx b/docs/content/utilities/validation.mdx
new file mode 100644
index 00000000000..74b762a096e
--- /dev/null
+++ b/docs/content/utilities/validation.mdx
@@ -0,0 +1,236 @@
+---
+title: Validation
+description: Utility
+---
+
+
+import Note from "../../src/components/Note"
+
+This utility provides JSON Schema validation for events and responses, including JMESPath support to unwrap events before validation.
+
+**Key features**
+
+* Validate incoming event and response
+* JMESPath support to unwrap events before validation applies
+* Built-in envelopes to unwrap popular event source payloads
+
+## Validating events
+
+You can validate inbound and outbound events using the `validator` decorator.
+
+You can also use the standalone `validate` function, if you want more control over the validation process such as handling a validation error.
+
+We support any JSON Schema draft supported by the [fastjsonschema](https://horejsek.github.io/python-fastjsonschema/) library.
+
+
+  Both the validator decorator and the validate standalone function expect your JSON Schema to be
+  a dictionary, not a filename.
+
+
+
+### Validator decorator
+
+The **Validator** decorator is typically used to validate either the inbound event or the function's response.
+
+It will fail fast with a `SchemaValidationError` exception if the event or response doesn't conform with the given JSON Schema.
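
The snippets that follow use `json_schema_dict = {..}` as a placeholder. For illustration, a minimal concrete schema could look like this (a hypothetical example; any JSON Schema draft supported by fastjsonschema works):

```python
json_schema_dict = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
        "message": {"type": "string"},
        "username": {"type": "string"},
    },
    "required": ["message"],
}
```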
+
+```python:title=validator_decorator.py
+from aws_lambda_powertools.utilities.validation import validator
+
+json_schema_dict = {..}
+response_json_schema_dict = {..}
+
+@validator(inbound_schema=json_schema_dict, outbound_schema=response_json_schema_dict)
+def handler(event, context):
+    return event
+```
+
+**NOTE**: It's not a requirement to validate both inbound and outbound schemas - you can use either one, or both.
+
+### Validate function
+
+The **Validate** standalone function is typically used within the Lambda handler, or in any other method that performs data validation.
+
+You can also gracefully handle schema validation errors by catching the `SchemaValidationError` exception.
+
+```python:title=validate_function.py
+from aws_lambda_powertools.utilities.validation import validate
+from aws_lambda_powertools.utilities.validation.exceptions import SchemaValidationError
+
+json_schema_dict = {..}
+
+def handler(event, context):
+    try:
+        validate(event=event, schema=json_schema_dict)
+    except SchemaValidationError as e:
+        # do something before re-raising
+        raise
+
+    return event
+```
+
+## Unwrapping events prior to validation
+
+You might want to validate only a portion of your event - this is what the `envelope` parameter is for.
+
+Envelopes are [JMESPath expressions](https://jmespath.org/tutorial.html) to extract the portion of the JSON you want before applying JSON Schema validation.
+
+Here is a sample custom EventBridge event, where we only validate what's inside the `detail` key:
+
+```json:title=sample_wrapped_event.json
+{
+  "id": "cdc73f9d-aea9-11e3-9d5a-835b769c0d9c",
+  "detail-type": "Scheduled Event",
+  "source": "aws.events",
+  "account": "123456789012",
+  "time": "1970-01-01T00:00:00Z",
+  "region": "us-east-1",
+  "resources": ["arn:aws:events:us-east-1:123456789012:rule/ExampleRule"],
+  "detail": {"message": "hello hello", "username": "blah blah"} // highlight-line
+}
+```
+
+Here is how you'd use the `envelope` parameter to extract the payload inside the `detail` key before validating:
+
+```python:title=unwrapping_events.py
+from aws_lambda_powertools.utilities.validation import validator, validate
+
+json_schema_dict = {..}
+
+@validator(inbound_schema=json_schema_dict, envelope="detail") # highlight-line
+def handler(event, context):
+    validate(event=event, schema=json_schema_dict, envelope="detail") # highlight-line
+    return event
+```
+
+This is quite powerful because you can use the JMESPath query language to [slice and dice arrays](https://jmespath.org/tutorial.html#list-and-slice-projections), and combine [pipe expressions](https://jmespath.org/tutorial.html#pipe-expressions) and [function expressions](https://jmespath.org/tutorial.html#functions) to extract exactly what you need before validating the actual payload.
+
+## Built-in envelopes
+
+This utility comes with built-in envelopes to easily extract the payload from popular event sources.
+
+```python:title=unwrapping_popular_event_sources.py
+from aws_lambda_powertools.utilities.validation import envelopes, validate, validator
+
+json_schema_dict = {..}
+
+@validator(inbound_schema=json_schema_dict, envelope=envelopes.EVENTBRIDGE) # highlight-line
+def handler(event, context):
+    validate(event=event, schema=json_schema_dict, envelope=envelopes.EVENTBRIDGE) # highlight-line
+    return event
+```
+
+Here is a handy table of the built-in envelopes along with their JMESPath expressions, in case you want to build your own.
+
+Envelope name | JMESPath expression
+------------------------------------------------- | ---------------------------------------------------------------------------------
+**API_GATEWAY_REST** | "powertools_json(body)"
+**API_GATEWAY_HTTP** | "powertools_json(body)"
+**SQS** | "Records[*].powertools_json(body)"
+**SNS** | "Records[0].Sns.Message | powertools_json(@)"
+**EVENTBRIDGE** | "detail"
+**CLOUDWATCH_EVENTS_SCHEDULED** | "detail"
+**KINESIS_DATA_STREAM** | "Records[*].kinesis.powertools_json(powertools_base64(data))"
+**CLOUDWATCH_LOGS** | "awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]"
+
+## Built-in JMESPath functions
+
+You might have events or responses whose payloads are encoded - as JSON strings, base64, or gzip - and need to be decoded before validation.
+
+You can use our built-in JMESPath functions within your expressions to do exactly that: decode JSON strings, decode base64, and uncompress gzip data.
+
+<Note type="info">
+  We use these for built-in envelopes to easily decode and unwrap events from sources like Kinesis, CloudWatch Logs, etc.
+</Note>
+
+### powertools_json function
+
+Use the `powertools_json` function to decode any JSON string.
+
+This sample will decode the value within the `data` key into valid JSON before we validate it.
+
+```python:title=powertools_json_jmespath_function.py
+from aws_lambda_powertools.utilities.validation import validate
+
+json_schema_dict = {..}
+sample_event = {
+    'data': '{"payload": {"message": "hello hello", "username": "blah blah"}}'
+}
+
+def handler(event, context):
+    validate(event=event, schema=json_schema_dict, envelope="powertools_json(data)") # highlight-line
+    return event
+
+handler(event=sample_event, context={})
+```
+
+### powertools_base64 function
+
+Use the `powertools_base64` function to decode any base64 data.
+
+This sample will decode the base64 value within the `data` key, then decode the resulting JSON string into valid JSON before we validate it.
+
+```python:title=powertools_base64_jmespath_function.py
+from aws_lambda_powertools.utilities.validation import validate
+
+json_schema_dict = {..}
+sample_event = {
+    "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9="
+}
+
+def handler(event, context):
+    validate(event=event, schema=json_schema_dict, envelope="powertools_json(powertools_base64(data))") # highlight-line
+    return event
+
+handler(event=sample_event, context={})
+```
+
+### powertools_base64_gzip function
+
+Use the `powertools_base64_gzip` function to decompress and decode base64 data.
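+
+Conceptually, the transformation is equivalent to this plain Python sketch (an illustration of the two steps involved, not the utility's actual implementation):
+
+```python:title=base64_gzip_sketch.py
+import base64
+import gzip
+
+def base64_gzip_decode(data: str) -> str:
+    # base64-decode first, then decompress the gzipped bytes into text
+    return gzip.decompress(base64.b64decode(data)).decode("utf-8")
+```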
+
+This sample will decompress and decode the base64 data, then use a JMESPath pipe expression to pass the result on to be decoded as a JSON string.
+
+```python:title=powertools_base64_gzip_jmespath_function.py
+from aws_lambda_powertools.utilities.validation import validate
+
+json_schema_dict = {..}
+sample_event = {
+    "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA=="
+}
+
+def handler(event, context):
+    validate(event=event, schema=json_schema_dict, envelope="powertools_base64_gzip(data) | powertools_json(@)") # highlight-line
+    return event
+
+handler(event=sample_event, context={})
+```
+
+## Bring your own JMESPath function
+
+<Note type="warning">
+  This should only be used for advanced use cases where you have special formats not covered by the built-in functions.
+  <br/><br/>
+  This will replace all provided built-in functions such as `powertools_json`, so you will no longer be able to use them.
+</Note>
+
+For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions) and any additional options via the `jmespath_options` parameter.
+
+```python:title=custom_jmespath_function.py
+from aws_lambda_powertools.utilities.validation import validate
+from jmespath import functions
+
+json_schema_dict = {..}
+
+class CustomFunctions(functions.Functions):
+
+    @functions.signature({'types': ['string']})
+    def _func_special_decoder(self, s):
+        return my_custom_decoder_logic(s)
+
+custom_jmespath_options = {"custom_functions": CustomFunctions()}
+
+def handler(event, context):
+    validate(event=event, schema=json_schema_dict, envelope="special_decoder(data)", jmespath_options=custom_jmespath_options) # highlight-line
+    return event
+```
diff --git a/docs/gatsby-config.js b/docs/gatsby-config.js
index a4286e0d55f..087f23a9634 100644
--- a/docs/gatsby-config.js
+++ b/docs/gatsby-config.js
@@ -34,6 +34,8 @@ module.exports = {
         'utilities/parameters',
         'utilities/batch',
         'utilities/typing',
+        'utilities/validation',
+        'utilities/data_classes'
       ],
     },
     navConfig: {
diff --git a/docs/src/gatsby-theme-apollo-core/components/flex-wrapper.js b/docs/src/gatsby-theme-apollo-core/components/flex-wrapper.js
index 94e1cd42e24..773b6751d0c 100644
--- a/docs/src/gatsby-theme-apollo-core/components/flex-wrapper.js
+++ b/docs/src/gatsby-theme-apollo-core/components/flex-wrapper.js
@@ -3,7 +3,7 @@ import styled from '@emotion/styled';
 const FlexWrapper = styled.div({
   display: 'flex',
   minHeight: '100vh',
-  maxWidth: 1600,
+  maxWidth: '87vw',
   margin: '0 auto'
 });
diff --git a/docs/src/styles/global.css b/docs/src/styles/global.css
index deeda92fac5..fb8ce5a54e6 100644
--- a/docs/src/styles/global.css
+++ b/docs/src/styles/global.css
@@ -25,3 +25,7 @@ tr > td {
 .token.property {
   color: darkmagenta !important
 }
+
+blockquote {
+  font-size: 1.15em
+}
diff --git a/poetry.lock b/poetry.lock
index e7b7cdff1db..303b2447966 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -104,13 +104,12 @@ description = "Classes Without Boilerplate"
 name = "attrs"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "19.3.0"
+version = "20.1.0"
 
 [package.extras]
-azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
-dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
-docs = ["sphinx", "zope.interface"]
-tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
+dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"]
+docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
 
 [[package]]
 category = "main"
@@ -218,10 +217,11 @@ version = "7.1.2"
 [[package]]
 category = "dev"
 description = "Cross-platform colored terminal text."
+marker = "sys_platform == \"win32\" or platform_system == \"Windows\" or python_version > \"3.4\"" name = "colorama" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.4.1" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.4.3" [[package]] category = "dev" @@ -229,7 +229,7 @@ description = "Code coverage measurement for Python" name = "coverage" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.2" +version = "5.2.1" [package.dependencies] [package.dependencies.toml] @@ -270,7 +270,7 @@ description = "Fastest Python implementation of JSON schema" name = "fastjsonschema" optional = false python-versions = "*" -version = "2.14.4" +version = "2.14.5" [package.extras] devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] @@ -589,7 +589,7 @@ description = "More routines for operating on iterables, beyond itertools" name = "more-itertools" optional = false python-versions = ">=3.5" -version = "8.4.0" +version = "8.5.0" [[package]] category = "dev" @@ -733,7 +733,7 @@ description = "Pytest plugin for measuring coverage." name = "pytest-cov" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.10.0" +version = "2.10.1" [package.dependencies] coverage = ">=4.4" @@ -781,14 +781,17 @@ description = "Code Metrics in Python" name = "radon" optional = false python-versions = "*" -version = "4.1.0" +version = "4.2.0" [package.dependencies] -colorama = "0.4.1" flake8-polyfill = "*" future = "*" mando = ">=0.6,<0.7" +[package.dependencies.colorama] +python = ">=3.5" +version = ">=0.4.1" + [[package]] category = "dev" description = "Alternative regular expression module, to replace re." 
@@ -848,7 +851,7 @@ description = "Manage dynamic plugins for Python applications" name = "stevedore" optional = false python-versions = ">=3.6" -version = "3.1.0" +version = "3.2.0" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" @@ -892,7 +895,7 @@ description = "Backported and Experimental Type Hints for Python 3.5+" name = "typing-extensions" optional = false python-versions = "*" -version = "3.7.4.2" +version = "3.7.4.3" [[package]] category = "main" @@ -900,7 +903,7 @@ description = "HTTP library with thread-safe connection pooling, file post, and name = "urllib3" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.9" +version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] @@ -942,15 +945,20 @@ description = "Yet another URL library" name = "yarl" optional = false python-versions = ">=3.5" -version = "1.4.2" +version = "1.5.1" [package.dependencies] idna = ">=2.0" multidict = ">=4.0" +[package.dependencies.typing-extensions] +python = "<3.8" +version = ">=3.7.4" + [[package]] category = "main" description = "Backport of pathlib-compatible object wrapper for zip files" +marker = "python_version < \"3.8\"" name = "zipp" optional = false python-versions = ">=3.6" @@ -960,8 +968,11 @@ version = "3.1.0" docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] testing = ["jaraco.itertools", "func-timeout"] +[extras] +jmespath = ["jmespath"] + [metadata] -content-hash = "18607a712e4a4a05de7350ecbcf26327a4fb45bb8609dc7f3d19b7610c2faafc" +content-hash = "73a725bb90970d6a99d39eb2fc833937e4576f5fe729d60e9b26d505e08a6ea0" lock-version = "1.0" python-versions = "^3.6" @@ -1005,8 +1016,8 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, - {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, + {file = "attrs-20.1.0-py2.py3-none-any.whl", hash = "sha256:2867b7b9f8326499ab5b0e2d12801fa5c98842d2cbd22b35112ae04bf85b4dff"}, + {file = "attrs-20.1.0.tar.gz", hash = "sha256:0ef97238856430dcf9228e07f316aefc17e8939fc8507e18c6501b761ef1a42a"}, ] aws-xray-sdk = [ {file = "aws-xray-sdk-2.6.0.tar.gz", hash = "sha256:abf5b90f740e1f402e23414c9670e59cb9772e235e271fef2bce62b9100cbc77"}, @@ -1041,44 +1052,44 @@ click = [ {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, ] colorama = [ - {file = "colorama-0.4.1-py2.py3-none-any.whl", hash = "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"}, - {file = "colorama-0.4.1.tar.gz", hash = "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d"}, + {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, + {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, ] coverage = [ - {file = "coverage-5.2-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:d9ad0a988ae20face62520785ec3595a5e64f35a21762a57d115dae0b8fb894a"}, - {file = "coverage-5.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:4bb385a747e6ae8a65290b3df60d6c8a692a5599dc66c9fa3520e667886f2e10"}, - {file = "coverage-5.2-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:9702e2cb1c6dec01fb8e1a64c015817c0800a6eca287552c47a5ee0ebddccf62"}, - {file = "coverage-5.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:42fa45a29f1059eda4d3c7b509589cc0343cd6bbf083d6118216830cd1a51613"}, - {file = "coverage-5.2-cp27-cp27m-win32.whl", hash = "sha256:41d88736c42f4a22c494c32cc48a05828236e37c991bd9760f8923415e3169e4"}, - {file = "coverage-5.2-cp27-cp27m-win_amd64.whl", hash = "sha256:bbb387811f7a18bdc61a2ea3d102be0c7e239b0db9c83be7bfa50f095db5b92a"}, - {file = "coverage-5.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3740b796015b889e46c260ff18b84683fa2e30f0f75a171fb10d2bf9fb91fc70"}, - {file = "coverage-5.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ebf2431b2d457ae5217f3a1179533c456f3272ded16f8ed0b32961a6d90e38ee"}, - {file = "coverage-5.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:d54d7ea74cc00482a2410d63bf10aa34ebe1c49ac50779652106c867f9986d6b"}, - {file = "coverage-5.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:87bdc8135b8ee739840eee19b184804e5d57f518578ffc797f5afa2c3c297913"}, - {file = "coverage-5.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:ed9a21502e9223f563e071759f769c3d6a2e1ba5328c31e86830368e8d78bc9c"}, - {file = "coverage-5.2-cp35-cp35m-win32.whl", hash = "sha256:509294f3e76d3f26b35083973fbc952e01e1727656d979b11182f273f08aa80b"}, - {file = "coverage-5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:ca63dae130a2e788f2b249200f01d7fa240f24da0596501d387a50e57aa7075e"}, - {file = "coverage-5.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:5c74c5b6045969b07c9fb36b665c9cac84d6c174a809fc1b21bdc06c7836d9a0"}, - {file = "coverage-5.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c32aa13cc3fe86b0f744dfe35a7f879ee33ac0a560684fef0f3e1580352b818f"}, - {file = "coverage-5.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1e58fca3d9ec1a423f1b7f2aa34af4f733cbfa9020c8fe39ca451b6071237405"}, - {file = "coverage-5.2-cp36-cp36m-win32.whl", hash = "sha256:3b2c34690f613525672697910894b60d15800ac7e779fbd0fccf532486c1ba40"}, - {file = "coverage-5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a4d511012beb967a39580ba7d2549edf1e6865a33e5fe51e4dce550522b3ac0e"}, - {file = "coverage-5.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:32ecee61a43be509b91a526819717d5e5650e009a8d5eda8631a59c721d5f3b6"}, - {file = "coverage-5.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6f91b4492c5cde83bfe462f5b2b997cdf96a138f7c58b1140f05de5751623cf1"}, - {file = "coverage-5.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bfcc811883699ed49afc58b1ed9f80428a18eb9166422bce3c31a53dba00fd1d"}, - {file = "coverage-5.2-cp37-cp37m-win32.whl", hash = "sha256:60a3d36297b65c7f78329b80120f72947140f45b5c7a017ea730f9112b40f2ec"}, - {file = "coverage-5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:12eaccd86d9a373aea59869bc9cfa0ab6ba8b1477752110cb4c10d165474f703"}, - {file = "coverage-5.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:d82db1b9a92cb5c67661ca6616bdca6ff931deceebb98eecbd328812dab52032"}, - {file = "coverage-5.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:214eb2110217f2636a9329bc766507ab71a3a06a8ea30cdeebb47c24dce5972d"}, - {file = "coverage-5.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8a3decd12e7934d0254939e2bf434bf04a5890c5bf91a982685021786a08087e"}, - {file = "coverage-5.2-cp38-cp38-win32.whl", hash = "sha256:1dcebae667b73fd4aa69237e6afb39abc2f27520f2358590c1b13dd90e32abe7"}, - {file = "coverage-5.2-cp38-cp38-win_amd64.whl", hash = "sha256:f50632ef2d749f541ca8e6c07c9928a37f87505ce3a9f20c8446ad310f1aa87b"}, - {file = 
"coverage-5.2-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:7403675df5e27745571aba1c957c7da2dacb537c21e14007ec3a417bf31f7f3d"}, - {file = "coverage-5.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:0fc4e0d91350d6f43ef6a61f64a48e917637e1dcfcba4b4b7d543c628ef82c2d"}, - {file = "coverage-5.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:25fe74b5b2f1b4abb11e103bb7984daca8f8292683957d0738cd692f6a7cc64c"}, - {file = "coverage-5.2-cp39-cp39-win32.whl", hash = "sha256:d67599521dff98ec8c34cd9652cbcfe16ed076a2209625fca9dc7419b6370e5c"}, - {file = "coverage-5.2-cp39-cp39-win_amd64.whl", hash = "sha256:10f2a618a6e75adf64329f828a6a5b40244c1c50f5ef4ce4109e904e69c71bd2"}, - {file = "coverage-5.2.tar.gz", hash = "sha256:1874bdc943654ba46d28f179c1846f5710eda3aeb265ff029e0ac2b52daae404"}, + {file = "coverage-5.2.1-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:40f70f81be4d34f8d491e55936904db5c527b0711b2a46513641a5729783c2e4"}, + {file = "coverage-5.2.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:675192fca634f0df69af3493a48224f211f8db4e84452b08d5fcebb9167adb01"}, + {file = "coverage-5.2.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2fcc8b58953d74d199a1a4d633df8146f0ac36c4e720b4a1997e9b6327af43a8"}, + {file = "coverage-5.2.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:64c4f340338c68c463f1b56e3f2f0423f7b17ba6c3febae80b81f0e093077f59"}, + {file = "coverage-5.2.1-cp27-cp27m-win32.whl", hash = "sha256:52f185ffd3291196dc1aae506b42e178a592b0b60a8610b108e6ad892cfc1bb3"}, + {file = "coverage-5.2.1-cp27-cp27m-win_amd64.whl", hash = "sha256:30bc103587e0d3df9e52cd9da1dd915265a22fad0b72afe54daf840c984b564f"}, + {file = "coverage-5.2.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9ea749fd447ce7fb1ac71f7616371f04054d969d412d37611716721931e36efd"}, + {file = "coverage-5.2.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ce7866f29d3025b5b34c2e944e66ebef0d92e4a4f2463f7266daa03a1332a651"}, + {file = "coverage-5.2.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:4869ab1c1ed33953bb2433ce7b894a28d724b7aa76c19b11e2878034a4e4680b"}, + {file = "coverage-5.2.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a3ee9c793ffefe2944d3a2bd928a0e436cd0ac2d9e3723152d6fd5398838ce7d"}, + {file = "coverage-5.2.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28f42dc5172ebdc32622a2c3f7ead1b836cdbf253569ae5673f499e35db0bac3"}, + {file = "coverage-5.2.1-cp35-cp35m-win32.whl", hash = "sha256:e26c993bd4b220429d4ec8c1468eca445a4064a61c74ca08da7429af9bc53bb0"}, + {file = "coverage-5.2.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4186fc95c9febeab5681bc3248553d5ec8c2999b8424d4fc3a39c9cba5796962"}, + {file = "coverage-5.2.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:b360d8fd88d2bad01cb953d81fd2edd4be539df7bfec41e8753fe9f4456a5082"}, + {file = "coverage-5.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:1adb6be0dcef0cf9434619d3b892772fdb48e793300f9d762e480e043bd8e716"}, + {file = "coverage-5.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:098a703d913be6fbd146a8c50cc76513d726b022d170e5e98dc56d958fd592fb"}, + {file = "coverage-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:962c44070c281d86398aeb8f64e1bf37816a4dfc6f4c0f114756b14fc575621d"}, + {file = "coverage-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1ed2bdb27b4c9fc87058a1cb751c4df8752002143ed393899edb82b131e0546"}, + {file = "coverage-5.2.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:c890728a93fffd0407d7d37c1e6083ff3f9f211c83b4316fae3778417eab9811"}, + {file = "coverage-5.2.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:538f2fd5eb64366f37c97fdb3077d665fa946d2b6d95447622292f38407f9258"}, + {file = "coverage-5.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:27ca5a2bc04d68f0776f2cdcb8bbd508bbe430a7bf9c02315cd05fb1d86d0034"}, + {file = "coverage-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:aab75d99f3f2874733946a7648ce87a50019eb90baef931698f96b76b6769a46"}, + {file = "coverage-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c2ff24df02a125b7b346c4c9078c8936da06964cc2d276292c357d64378158f8"}, + {file = "coverage-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:304fbe451698373dc6653772c72c5d5e883a4aadaf20343592a7abb2e643dae0"}, + {file = "coverage-5.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c96472b8ca5dc135fb0aa62f79b033f02aa434fb03a8b190600a5ae4102df1fd"}, + {file = "coverage-5.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8505e614c983834239f865da2dd336dcf9d72776b951d5dfa5ac36b987726e1b"}, + {file = "coverage-5.2.1-cp38-cp38-win32.whl", hash = "sha256:700997b77cfab016533b3e7dbc03b71d33ee4df1d79f2463a318ca0263fc29dd"}, + {file = "coverage-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:46794c815e56f1431c66d81943fa90721bb858375fb36e5903697d5eef88627d"}, + {file = "coverage-5.2.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:16042dc7f8e632e0dcd5206a5095ebd18cb1d005f4c89694f7f8aafd96dd43a3"}, + {file = "coverage-5.2.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c1bbb628ed5192124889b51204de27c575b3ffc05a5a91307e7640eff1d48da4"}, + {file = "coverage-5.2.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4f6428b55d2916a69f8d6453e48a505c07b2245653b0aa9f0dee38785939f5e4"}, + {file = "coverage-5.2.1-cp39-cp39-win32.whl", hash = "sha256:9e536783a5acee79a9b308be97d3952b662748c4037b6a24cbb339dc7ed8eb89"}, + {file = "coverage-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:b8f58c7db64d8f27078cbf2a4391af6aa4e4767cc08b37555c4ae064b8558d9b"}, + {file = "coverage-5.2.1.tar.gz", hash = "sha256:a34cb28e0747ea15e82d13e14de606747e9e484fb28d63c999483f5d5188e89b"}, ] dataclasses = [ {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, @@ -1093,8 +1104,8 @@ eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, ] fastjsonschema = [ - {file = "fastjsonschema-2.14.4-py3-none-any.whl", hash = "sha256:02a39b518077cc73c1a537f27776527dc6c1e5012d530eb8ac0d1062efbabff7"}, - {file = "fastjsonschema-2.14.4.tar.gz", hash = "sha256:7292cde54f1c30172f78557509ad4cb152f374087fc844bd113a83e2ac494dd6"}, + {file = "fastjsonschema-2.14.5-py3-none-any.whl", hash = "sha256:467593c61f5ba8307205a3536313a774b37df91c9a937c5267c11aee5256e77e"}, + {file = "fastjsonschema-2.14.5.tar.gz", hash = "sha256:afbc235655f06356e46caa80190512e4d9222abfaca856041be5a74c665fa094"}, ] flake8 = [ {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, @@ -1223,8 +1234,8 @@ mccabe = [ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] more-itertools = [ - {file = "more-itertools-8.4.0.tar.gz", hash = "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5"}, - {file = "more_itertools-8.4.0-py3-none-any.whl", hash = "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"}, + {file = "more-itertools-8.5.0.tar.gz", hash = "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20"}, + {file = 
"more_itertools-8.5.0-py3-none-any.whl", hash = "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c"}, ] multidict = [ {file = "multidict-4.7.6-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000"}, @@ -1288,8 +1299,8 @@ pytest-asyncio = [ {file = "pytest-asyncio-0.12.0.tar.gz", hash = "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2"}, ] pytest-cov = [ - {file = "pytest-cov-2.10.0.tar.gz", hash = "sha256:1a629dc9f48e53512fcbfda6b07de490c374b0c83c55ff7a1720b3fccff0ac87"}, - {file = "pytest_cov-2.10.0-py2.py3-none-any.whl", hash = "sha256:6e6d18092dce6fad667cd7020deed816f858ad3b49d5b5e2b1cc1c97a4dba65c"}, + {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, + {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, ] pytest-mock = [ {file = "pytest-mock-2.0.0.tar.gz", hash = "sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f"}, @@ -1313,8 +1324,8 @@ pyyaml = [ {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, ] radon = [ - {file = "radon-4.1.0-py2.py3-none-any.whl", hash = "sha256:0c18111ec6cfe7f664bf9db6c51586714ac8c6d9741542706df8a85aca39b99a"}, - {file = "radon-4.1.0.tar.gz", hash = "sha256:56082c52206db45027d4a73612e1b21663c4cc2be3760fee769d966fd7efdd6d"}, + {file = "radon-4.2.0-py2.py3-none-any.whl", hash = "sha256:215e42c8748b5ca8ddf7c061831600b9e73e9c48770a81eeaaeeb066697aee15"}, + {file = "radon-4.2.0.tar.gz", hash = "sha256:b73f6f469c15c9616e0f7ce12080a9ecdee9f2335bdbb5ccea1f2bae26e8d20d"}, ] regex = [ {file = "regex-2020.7.14-cp27-cp27m-win32.whl", hash = "sha256:e46d13f38cfcbb79bfdb2964b0fe12561fe633caf964a77a5f8d4e45fe5d2ef7"}, @@ -1356,8 +1367,8 @@ smmap = [ {file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"}, ] stevedore = [ - {file = "stevedore-3.1.0-py3-none-any.whl", hash = "sha256:9fb12884b510fdc25f8a883bb390b8ff82f67863fb360891a33135bcb2ce8c54"}, - {file = "stevedore-3.1.0.tar.gz", hash = "sha256:79270bd5fb4a052e76932e9fef6e19afa77090c4000f2680eb8c2e887d2e6e36"}, + {file = "stevedore-3.2.0-py3-none-any.whl", hash = "sha256:c8f4f0ebbc394e52ddf49de8bcc3cf8ad2b4425ebac494106bbc5e3661ac7633"}, + {file = "stevedore-3.2.0.tar.gz", hash = "sha256:38791aa5bed922b0a844513c5f9ed37774b68edc609e5ab8ab8d8fe0ce4315e5"}, ] testfixtures = [ {file = "testfixtures-6.14.1-py2.py3-none-any.whl", hash = "sha256:30566e24a1b34e4d3f8c13abf62557d01eeb4480bcb8f1745467bfb0d415a7d9"}, @@ -1391,13 +1402,13 @@ typed-ast = [ {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, ] typing-extensions = [ - {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"}, - {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, - {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, + {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, + {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = 
"sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, + {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.25.9-py2.py3-none-any.whl", hash = "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"}, - {file = "urllib3-1.25.9.tar.gz", hash = "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"}, + {file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"}, + {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -1411,23 +1422,23 @@ xenon = [ {file = "xenon-0.7.0.tar.gz", hash = "sha256:5e6433c9297d965bf666256a0a030b6e13660ab87680220c4eb07241f101625b"}, ] yarl = [ - {file = "yarl-1.4.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b"}, - {file = "yarl-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1"}, - {file = "yarl-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080"}, - {file = "yarl-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a"}, - {file = "yarl-1.4.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f"}, - {file = "yarl-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea"}, - {file = "yarl-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb"}, - {file = "yarl-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70"}, - {file = "yarl-1.4.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d"}, - {file = "yarl-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce"}, - {file = "yarl-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2"}, - {file = "yarl-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce"}, - {file = "yarl-1.4.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b"}, - {file = "yarl-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae"}, - {file = "yarl-1.4.2-cp38-cp38-win32.whl", hash = "sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462"}, - {file = "yarl-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6"}, - {file = "yarl-1.4.2.tar.gz", hash = "sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b"}, + {file = "yarl-1.5.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:db6db0f45d2c63ddb1a9d18d1b9b22f308e52c83638c26b422d520a815c4b3fb"}, + {file = "yarl-1.5.1-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:17668ec6722b1b7a3a05cc0167659f6c95b436d25a36c2d52db0eca7d3f72593"}, + {file = "yarl-1.5.1-cp35-cp35m-win32.whl", hash = "sha256:040b237f58ff7d800e6e0fd89c8439b841f777dd99b4a9cca04d6935564b9409"}, + {file = "yarl-1.5.1-cp35-cp35m-win_amd64.whl", hash = "sha256:f18d68f2be6bf0e89f1521af2b1bb46e66ab0018faafa81d70f358153170a317"}, + {file = "yarl-1.5.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:c52ce2883dc193824989a9b97a76ca86ecd1fa7955b14f87bf367a61b6232511"}, + {file = "yarl-1.5.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ce584af5de8830d8701b8979b18fcf450cef9a382b1a3c8ef189bedc408faf1e"}, + {file = "yarl-1.5.1-cp36-cp36m-win32.whl", hash = "sha256:df89642981b94e7db5596818499c4b2219028f2a528c9c37cc1de45bf2fd3a3f"}, + {file = "yarl-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:3a584b28086bc93c888a6c2aa5c92ed1ae20932f078c46509a66dce9ea5533f2"}, + {file = "yarl-1.5.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:da456eeec17fa8aa4594d9a9f27c0b1060b6a75f2419fe0c00609587b2695f4a"}, + {file = "yarl-1.5.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bc2f976c0e918659f723401c4f834deb8a8e7798a71be4382e024bcc3f7e23a8"}, + {file = "yarl-1.5.1-cp37-cp37m-win32.whl", hash = "sha256:4439be27e4eee76c7632c2427ca5e73703151b22cae23e64adb243a9c2f565d8"}, + {file = "yarl-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:48e918b05850fffb070a496d2b5f97fc31d15d94ca33d3d08a4f86e26d4e7c5d"}, + {file = "yarl-1.5.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9b930776c0ae0c691776f4d2891ebc5362af86f152dd0da463a6614074cb1b02"}, + {file = "yarl-1.5.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b3b9ad80f8b68519cc3372a6ca85ae02cc5a8807723ac366b53c0f089db19e4a"}, + {file = "yarl-1.5.1-cp38-cp38-win32.whl", hash = "sha256:f379b7f83f23fe12823085cd6b906edc49df969eb99757f58ff382349a3303c6"}, + {file = "yarl-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:9102b59e8337f9874638fcfc9ac3734a0cfadb100e47d55c20d0dc6087fb4692"}, + {file = "yarl-1.5.1.tar.gz", hash = "sha256:c22c75b5f394f3d47105045ea551e08a3e804dc7e01b37800ca35b58f856c3d6"}, ] zipp = [ {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, diff --git a/pyproject.toml b/pyproject.toml index 1fc075e8382..803b200a0c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.5.0" +version = "1.6.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] classifiers=[ @@ -20,8 +20,9 @@ license = "MIT-0" [tool.poetry.dependencies] python = "^3.6" aws-xray-sdk = "^2.5.0" -fastjsonschema = "~=2.14.4" +fastjsonschema = "^2.14.5" boto3 = "^1.12" +jmespath = "^0.10.0" [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^5.0.3"} diff --git a/tests/events/albEvent.json b/tests/events/albEvent.json new file mode 100644 index 00000000000..9328cb39e12 --- /dev/null +++ b/tests/events/albEvent.json @@ -0,0 +1,28 @@ +{ + "requestContext": { + "elb": { + "targetGroupArn": "arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a" + } + }, + "httpMethod": "GET", + "path": "/lambda", + "queryStringParameters": { + "query": "1234ABCD" + }, + "headers": { + "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", + "accept-encoding": "gzip", + "accept-language": "en-US,en;q=0.9", + "connection": "keep-alive", 
+ "host": "lambda-alb-123578498.us-east-2.elb.amazonaws.com", + "upgrade-insecure-requests": "1", + "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", + "x-amzn-trace-id": "Root=1-5c536348-3d683b8b04734faae651f476", + "x-forwarded-for": "72.12.164.125", + "x-forwarded-port": "80", + "x-forwarded-proto": "http", + "x-imforwards": "20" + }, + "body": "Test", + "isBase64Encoded": false +} diff --git a/tests/events/apiGatewayProxyEvent.json b/tests/events/apiGatewayProxyEvent.json new file mode 100644 index 00000000000..1fed04a25bf --- /dev/null +++ b/tests/events/apiGatewayProxyEvent.json @@ -0,0 +1,70 @@ +{ + "version": "1.0", + "resource": "/my/path", + "path": "/my/path", + "httpMethod": "GET", + "headers": { + "Header1": "value1", + "Header2": "value2" + }, + "multiValueHeaders": { + "Header1": [ + "value1" + ], + "Header2": [ + "value1", + "value2" + ] + }, + "queryStringParameters": { + "parameter1": "value1", + "parameter2": "value" + }, + "multiValueQueryStringParameters": { + "parameter1": [ + "value1", + "value2" + ], + "parameter2": [ + "value" + ] + }, + "requestContext": { + "accountId": "123456789012", + "apiId": "id", + "authorizer": { + "claims": null, + "scopes": null + }, + "domainName": "id.execute-api.us-east-1.amazonaws.com", + "domainPrefix": "id", + "extendedRequestId": "request-id", + "httpMethod": "GET", + "identity": { + "accessKey": null, + "accountId": null, + "caller": null, + "cognitoAuthenticationProvider": null, + "cognitoAuthenticationType": null, + "cognitoIdentityId": null, + "cognitoIdentityPoolId": null, + "principalOrgId": null, + "sourceIp": "IP", + "user": null, + "userAgent": "user-agent", + "userArn": null + }, + "path": "/my/path", + "protocol": "HTTP/1.1", + "requestId": "id=", + "requestTime": "04/Mar/2020:19:15:17 +0000", + "requestTimeEpoch": 1583349317135, + "resourceId": null, + "resourcePath": "/my/path", + "stage": "$default" + }, + "pathParameters": null, + "stageVariables": null, + "body": "Hello from Lambda!", + "isBase64Encoded": true +} diff --git a/tests/events/apiGatewayProxyV2Event.json b/tests/events/apiGatewayProxyV2Event.json new file mode 100644 index 00000000000..9c310e6d52f --- /dev/null +++ b/tests/events/apiGatewayProxyV2Event.json @@ -0,0 +1,57 @@ +{ + "version": "2.0", + "routeKey": "$default", + "rawPath": "/my/path", + "rawQueryString": "parameter1=value1¶meter1=value2¶meter2=value", + "cookies": [ + "cookie1", + "cookie2" + ], + "headers": { + "Header1": "value1", + "Header2": "value1,value2" + }, + "queryStringParameters": { + "parameter1": "value1,value2", + "parameter2": "value" + }, + "requestContext": { + "accountId": "123456789012", + "apiId": "api-id", + "authorizer": { + "jwt": { + "claims": { + "claim1": "value1", + "claim2": "value2" + }, + "scopes": [ + "scope1", + "scope2" + ] + } + }, + "domainName": "id.execute-api.us-east-1.amazonaws.com", + "domainPrefix": "id", + "http": { + "method": "POST", + "path": "/my/path", + "protocol": "HTTP/1.1", + "sourceIp": "IP", + "userAgent": "agent" + }, + "requestId": "id", + "routeKey": "$default", + "stage": "$default", + "time": "12/Mar/2020:19:03:58 +0000", + "timeEpoch": 1583348638390 + }, + "body": "Hello from Lambda", + "pathParameters": { + "parameter1": "value1" + }, + "isBase64Encoded": false, + "stageVariables": { + "stageVariable1": "value1", + "stageVariable2": "value2" + } +} diff --git a/tests/events/cloudWatchLogEvent.json b/tests/events/cloudWatchLogEvent.json new file mode 
100644 index 00000000000..aa184c1d013 --- /dev/null +++ b/tests/events/cloudWatchLogEvent.json @@ -0,0 +1,5 @@ +{ + "awslogs": { + "data": "H4sIAAAAAAAAAHWPwQqCQBCGX0Xm7EFtK+smZBEUgXoLCdMhFtKV3akI8d0bLYmibvPPN3wz00CJxmQnTO41whwWQRIctmEcB6sQbFC3CjW3XW8kxpOpP+OC22d1Wml1qZkQGtoMsScxaczKN3plG8zlaHIta5KqWsozoTYw3/djzwhpLwivWFGHGpAFe7DL68JlBUk+l7KSN7tCOEJ4M3/qOI49vMHj+zCKdlFqLaU2ZHV2a4Ct/an0/ivdX8oYc1UVX860fQDQiMdxRQEAAA==" + } +} diff --git a/tests/events/cognitoCustomMessageEvent.json b/tests/events/cognitoCustomMessageEvent.json new file mode 100644 index 00000000000..8652c3bff40 --- /dev/null +++ b/tests/events/cognitoCustomMessageEvent.json @@ -0,0 +1,20 @@ +{ + "version": "1", + "triggerSource": "CustomMessage_AdminCreateUser", + "region": "region", + "userPoolId": "userPoolId", + "userName": "userName", + "callerContext": { + "awsSdk": "awsSdkVersion", + "clientId": "clientId" + }, + "request": { + "userAttributes": { + "phone_number_verified": false, + "email_verified": true + }, + "codeParameter": "####", + "usernameParameter": "username" + }, + "response": {} +} diff --git a/tests/events/cognitoPostAuthenticationEvent.json b/tests/events/cognitoPostAuthenticationEvent.json new file mode 100644 index 00000000000..3b1faa81bf9 --- /dev/null +++ b/tests/events/cognitoPostAuthenticationEvent.json @@ -0,0 +1,18 @@ +{ + "version": "1", + "region": "us-east-1", + "userPoolId": "us-east-1_example", + "userName": "UserName", + "callerContext": { + "awsSdkVersion": "awsSdkVersion", + "clientId": "clientId" + }, + "triggerSource": "PostAuthentication_Authentication", + "request": { + "newDeviceUsed": true, + "userAttributes": { + "email": "test@mail.com" + } + }, + "response": {} +} diff --git a/tests/events/cognitoPostConfirmationEvent.json b/tests/events/cognitoPostConfirmationEvent.json new file mode 100644 index 00000000000..e88f98150ca --- /dev/null +++ b/tests/events/cognitoPostConfirmationEvent.json @@ -0,0 +1,18 @@ +{ + "version": "string", + "triggerSource": "PostConfirmation_ConfirmSignUp", + "region": "us-east-1", + "userPoolId": "string", + "userName": "userName", + "callerContext": { + "awsSdkVersion": "awsSdkVersion", + "clientId": "clientId" + }, + "request": { + "userAttributes": { + "email": "user@example.com", + "email_verified": true + } + }, + "response": {} +} diff --git a/tests/events/cognitoPreAuthenticationEvent.json b/tests/events/cognitoPreAuthenticationEvent.json new file mode 100644 index 00000000000..75ff9ce34b3 --- /dev/null +++ b/tests/events/cognitoPreAuthenticationEvent.json @@ -0,0 +1,20 @@ +{ + "version": "1", + "region": "us-east-1", + "userPoolId": "us-east-1_example", + "userName": "UserName", + "callerContext": { + "awsSdkVersion": "awsSdkVersion", + "clientId": "clientId" + }, + "triggerSource": "PreAuthentication_Authentication", + "request": { + "userAttributes": { + "sub": "4A709A36-7D63-4785-829D-4198EF10EBDA", + "email_verified": "true", + "name": "First Last", + "email": "test@mail.com" + } + }, + "response": {} +} diff --git a/tests/events/cognitoPreSignUpEvent.json b/tests/events/cognitoPreSignUpEvent.json new file mode 100644 index 00000000000..feb4eba25dd --- /dev/null +++ b/tests/events/cognitoPreSignUpEvent.json @@ -0,0 +1,18 @@ +{ + "version": "string", + "triggerSource": "PreSignUp_SignUp", + "region": "us-east-1", + "userPoolId": "string", + "userName": "userName", + "callerContext": { + "awsSdkVersion": "awsSdkVersion", + "clientId": "clientId" + }, + "request": { + "userAttributes": { + "email": "user@example.com", + "phone_number": 
"+12065550100" + } + }, + "response": {} +} diff --git a/tests/events/cognitoPreTokenGenerationEvent.json b/tests/events/cognitoPreTokenGenerationEvent.json new file mode 100644 index 00000000000..f5ee69e0d2d --- /dev/null +++ b/tests/events/cognitoPreTokenGenerationEvent.json @@ -0,0 +1,25 @@ +{ + "version": "1", + "triggerSource": "TokenGeneration_Authentication", + "region": "us-west-2", + "userPoolId": "us-west-2_example", + "userName": "testqq", + "callerContext": { + "awsSdkVersion": "aws-sdk-unknown-unknown", + "clientId": "71ghuul37mresr7h373b704tua" + }, + "request": { + "userAttributes": { + "sub": "0b0a57c5-f013-426a-81a1-f8ffbfba21f0", + "email_verified": "true", + "cognito:user_status": "CONFIRMED", + "email": "test@mail.com" + }, + "groupConfiguration": { + "groupsToOverride": [], + "iamRolesToOverride": [], + "preferredRole": null + } + }, + "response": {} +} diff --git a/tests/events/cognitoUserMigrationEvent.json b/tests/events/cognitoUserMigrationEvent.json new file mode 100644 index 00000000000..2eae4e66189 --- /dev/null +++ b/tests/events/cognitoUserMigrationEvent.json @@ -0,0 +1,15 @@ +{ + "version": "string", + "triggerSource": "UserMigration_Authentication", + "region": "us-east-1", + "userPoolId": "string", + "userName": "userName", + "callerContext": { + "awsSdkVersion": "awsSdkVersion", + "clientId": "clientId" + }, + "request": { + "password": "password" + }, + "response": {} +} diff --git a/tests/events/dynamoStreamEvent.json b/tests/events/dynamoStreamEvent.json new file mode 100644 index 00000000000..12c535b005e --- /dev/null +++ b/tests/events/dynamoStreamEvent.json @@ -0,0 +1,64 @@ +{ + "Records": [ + { + "eventID": "1", + "eventVersion": "1.0", + "dynamodb": { + "Keys": { + "Id": { + "N": "101" + } + }, + "NewImage": { + "Message": { + "S": "New item!" + }, + "Id": { + "N": "101" + } + }, + "StreamViewType": "NEW_AND_OLD_IMAGES", + "SequenceNumber": "111", + "SizeBytes": 26 + }, + "awsRegion": "us-west-2", + "eventName": "INSERT", + "eventSourceARN": "eventsource_arn", + "eventSource": "aws:dynamodb" + }, + { + "eventID": "2", + "eventVersion": "1.0", + "dynamodb": { + "OldImage": { + "Message": { + "S": "New item!" 
+ }, + "Id": { + "N": "101" + } + }, + "SequenceNumber": "222", + "Keys": { + "Id": { + "N": "101" + } + }, + "SizeBytes": 59, + "NewImage": { + "Message": { + "S": "This item has changed" + }, + "Id": { + "N": "101" + } + }, + "StreamViewType": "NEW_AND_OLD_IMAGES" + }, + "awsRegion": "us-west-2", + "eventName": "MODIFY", + "eventSourceARN": "source_arn", + "eventSource": "aws:dynamodb" + } + ] +} diff --git a/tests/events/eventBridgeEvent.json b/tests/events/eventBridgeEvent.json new file mode 100644 index 00000000000..e8d949001c9 --- /dev/null +++ b/tests/events/eventBridgeEvent.json @@ -0,0 +1,16 @@ +{ + "version": "0", + "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", + "detail-type": "EC2 Instance State-change Notification", + "source": "aws.ec2", + "account": "111122223333", + "time": "2017-12-22T18:43:48Z", + "region": "us-west-1", + "resources": [ + "arn:aws:ec2:us-west-1:123456789012:instance/ i-1234567890abcdef0" + ], + "detail": { + "instance-id": " i-1234567890abcdef0", + "state": "terminated" + } +} diff --git a/tests/events/kinesisStreamEvent.json b/tests/events/kinesisStreamEvent.json new file mode 100644 index 00000000000..ef8e2096388 --- /dev/null +++ b/tests/events/kinesisStreamEvent.json @@ -0,0 +1,36 @@ +{ + "Records": [ + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", + "data": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg==", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + }, + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "49590338271490256608559692540925702759324208523137515618", + "data": "VGhpcyBpcyBvbmx5IGEgdGVzdC4=", + "approximateArrivalTimestamp": 1545084711.166 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:49590338271490256608559692540925702759324208523137515618", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + } + ] +} diff --git a/tests/events/s3Event.json b/tests/events/s3Event.json new file mode 100644 index 00000000000..4558dc3c9e1 --- /dev/null +++ b/tests/events/s3Event.json @@ -0,0 +1,38 @@ +{ + "Records": [ + { + "eventVersion": "2.1", + "eventSource": "aws:s3", + "awsRegion": "us-east-2", + "eventTime": "2019-09-03T19:37:27.192Z", + "eventName": "ObjectCreated:Put", + "userIdentity": { + "principalId": "AWS:AIDAINPONIXQXHT3IKHL2" + }, + "requestParameters": { + "sourceIPAddress": "205.255.255.255" + }, + "responseElements": { + "x-amz-request-id": "D82B88E5F771F645", + "x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" + }, + "s3": { + "s3SchemaVersion": "1.0", + "configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1", + "bucket": { + "name": "lambda-artifacts-deafc19498e3f2df", + "ownerIdentity": { + "principalId": "A3I5XTEXAMAI3E" + }, + "arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" + }, + "object": { + "key": "b21b84d653bb07b05b1e6b33684dc11b", + "size": 1305107, + "eTag": 
"b21b84d653bb07b05b1e6b33684dc11b", + "sequencer": "0C0F6F405D6ED209E1" + } + } + } + ] +} diff --git a/tests/events/sesEvent.json b/tests/events/sesEvent.json new file mode 100644 index 00000000000..5a5afd5bab7 --- /dev/null +++ b/tests/events/sesEvent.json @@ -0,0 +1,100 @@ +{ + "Records": [ + { + "eventVersion": "1.0", + "ses": { + "mail": { + "commonHeaders": { + "from": [ + "Jane Doe " + ], + "to": [ + "johndoe@example.com" + ], + "returnPath": "janedoe@example.com", + "messageId": "<0123456789example.com>", + "date": "Wed, 7 Oct 2015 12:34:56 -0700", + "subject": "Test Subject" + }, + "source": "janedoe@example.com", + "timestamp": "1970-01-01T00:00:00.000Z", + "destination": [ + "johndoe@example.com" + ], + "headers": [ + { + "name": "Return-Path", + "value": "" + }, + { + "name": "Received", + "value": "from mailer.example.com (mailer.example.com [203.0.113.1]) by ..." + }, + { + "name": "DKIM-Signature", + "value": "v=1; a=rsa-sha256; c=relaxed/relaxed; d=example.com; s=example; ..." + }, + { + "name": "MIME-Version", + "value": "1.0" + }, + { + "name": "From", + "value": "Jane Doe " + }, + { + "name": "Date", + "value": "Wed, 7 Oct 2015 12:34:56 -0700" + }, + { + "name": "Message-ID", + "value": "<0123456789example.com>" + }, + { + "name": "Subject", + "value": "Test Subject" + }, + { + "name": "To", + "value": "johndoe@example.com" + }, + { + "name": "Content-Type", + "value": "text/plain; charset=UTF-8" + } + ], + "headersTruncated": false, + "messageId": "o3vrnil0e2ic28tr" + }, + "receipt": { + "recipients": [ + "johndoe@example.com" + ], + "timestamp": "1970-01-01T00:00:00.000Z", + "spamVerdict": { + "status": "PASS" + }, + "dkimVerdict": { + "status": "PASS" + }, + "processingTimeMillis": 574, + "action": { + "type": "Lambda", + "invocationType": "Event", + "functionArn": "arn:aws:lambda:us-west-2:012345678912:function:Example" + }, + "dmarcVerdict": { + "status": "PASS" + }, + "spfVerdict": { + "status": "PASS" + }, + "virusVerdict": { + "status": "PASS" + } + } + }, + "eventSource": "aws:ses" + } + ] +} diff --git a/tests/events/snsEvent.json b/tests/events/snsEvent.json new file mode 100644 index 00000000000..b351dfd1418 --- /dev/null +++ b/tests/events/snsEvent.json @@ -0,0 +1,31 @@ +{ + "Records": [ + { + "EventVersion": "1.0", + "EventSubscriptionArn": "arn:aws:sns:us-east-2:123456789012:sns-la ...", + "EventSource": "aws:sns", + "Sns": { + "SignatureVersion": "1", + "Timestamp": "2019-01-02T12:45:07.000Z", + "Signature": "tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==", + "SigningCertUrl": "https://sns.us-east-2.amazonaws.com/SimpleNotificat ...", + "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "Message": "Hello from SNS!", + "MessageAttributes": { + "Test": { + "Type": "String", + "Value": "TestString" + }, + "TestBinary": { + "Type": "Binary", + "Value": "TestBinary" + } + }, + "Type": "Notification", + "UnsubscribeUrl": "https://sns.us-east-2.amazonaws.com/?Action=Unsubscri ...", + "TopicArn": "arn:aws:sns:us-east-2:123456789012:sns-lambda", + "Subject": "TestInvoke" + } + } + ] +} diff --git a/tests/events/sqsEvent.json b/tests/events/sqsEvent.json new file mode 100644 index 00000000000..7201068d60c --- /dev/null +++ b/tests/events/sqsEvent.json @@ -0,0 +1,42 @@ +{ + "Records": [ + { + "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...", + "body": "Test message.", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": 
"1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": { + "testAttr": { + "stringValue": "100", + "binaryValue": "base64Str", + "dataType": "Number" + } + }, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue", + "awsRegion": "us-east-2" + }, + { + "messageId": "2e1424d4-f796-459a-8184-9c92662be6da", + "receiptHandle": "AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...", + "body": "Test message.", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082650636", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082650649" + }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue", + "awsRegion": "us-east-2" + } + ] +} diff --git a/tests/functional/test_lambda_trigger_events.py b/tests/functional/test_lambda_trigger_events.py new file mode 100644 index 00000000000..21e775b7a5f --- /dev/null +++ b/tests/functional/test_lambda_trigger_events.py @@ -0,0 +1,633 @@ +import base64 +import json +import os +from secrets import compare_digest +from urllib.parse import quote_plus + +from aws_lambda_powertools.utilities.data_classes import ( + ALBEvent, + APIGatewayProxyEvent, + APIGatewayProxyEventV2, + CloudWatchLogsEvent, + EventBridgeEvent, + KinesisStreamEvent, + S3Event, + SESEvent, + SNSEvent, + SQSEvent, +) +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import ( + CustomMessageTriggerEvent, + PostAuthenticationTriggerEvent, + PostConfirmationTriggerEvent, + PreAuthenticationTriggerEvent, + PreSignUpTriggerEvent, + PreTokenGenerationTriggerEvent, + UserMigrationTriggerEvent, +) +from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + AttributeValue, + DynamoDBRecordEventName, + DynamoDBStreamEvent, + StreamViewType, +) + + +def load_event(file_name: str) -> dict: + full_file_name = os.path.dirname(os.path.realpath(__file__)) + "/../events/" + file_name + with open(full_file_name) as fp: + return json.load(fp) + + +def test_cloud_watch_trigger_event(): + event = CloudWatchLogsEvent(load_event("cloudWatchLogEvent.json")) + + decompressed_logs_data = event.decompress_logs_data + assert event.decompress_logs_data == decompressed_logs_data + + json_logs_data = event.parse_logs_data() + assert event.parse_logs_data()._data == json_logs_data._data + log_events = json_logs_data.log_events + log_event = log_events[0] + + assert json_logs_data.owner == "123456789123" + assert json_logs_data.log_group == "testLogGroup" + assert json_logs_data.log_stream == "testLogStream" + assert json_logs_data.subscription_filters == ["testFilter"] + assert json_logs_data.message_type == "DATA_MESSAGE" + + assert log_event.get_id == "eventId1" + assert log_event.timestamp == 1440442987000 + assert log_event.message == "[ERROR] First test message" + assert log_event.extracted_fields is None + + event2 = CloudWatchLogsEvent(load_event("cloudWatchLogEvent.json")) + assert event._data == event2._data + + +def test_cognito_pre_signup_trigger_event(): + event = PreSignUpTriggerEvent(load_event("cognitoPreSignUpEvent.json")) + + assert event.version == "string" + assert event.trigger_source == "PreSignUp_SignUp" + assert event.region == "us-east-1" + assert 
event.user_pool_id == "string" + assert event.user_name == "userName" + caller_context = event.caller_context + assert caller_context.aws_sdk_version == "awsSdkVersion" + assert caller_context.client_id == "clientId" + + user_attributes = event.request.user_attributes + assert user_attributes["email"] == "user@example.com" + + assert event.request.validation_data is None + assert event.request.client_metadata is None + + event.response.auto_confirm_user = True + assert event.response.auto_confirm_user is True + event.response.auto_verify_phone = True + assert event.response.auto_verify_phone is True + event.response.auto_verify_email = True + assert event.response.auto_verify_email is True + assert event["response"]["autoVerifyEmail"] is True + + +def test_cognito_post_confirmation_trigger_event(): + event = PostConfirmationTriggerEvent(load_event("cognitoPostConfirmationEvent.json")) + + user_attributes = event.request.user_attributes + assert user_attributes["email"] == "user@example.com" + assert event.request.client_metadata is None + + +def test_cognito_user_migration_trigger_event(): + event = UserMigrationTriggerEvent(load_event("cognitoUserMigrationEvent.json")) + + assert compare_digest(event.request.password, event["request"]["password"]) + assert event.request.validation_data is None + assert event.request.client_metadata is None + + event.response.user_attributes = {"username": "username"} + assert event.response.user_attributes == event["response"]["userAttributes"] + assert event.response.user_attributes == {"username": "username"} + assert event.response.final_user_status is None + assert event.response.message_action is None + assert event.response.force_alias_creation is None + assert event.response.desired_delivery_mediums is None + + event.response.final_user_status = "CONFIRMED" + assert event.response.final_user_status == "CONFIRMED" + event.response.message_action = "SUPPRESS" + assert event.response.message_action == "SUPPRESS" + event.response.force_alias_creation = True + assert event.response.force_alias_creation is True + event.response.desired_delivery_mediums = ["EMAIL"] + assert event.response.desired_delivery_mediums == ["EMAIL"] + + +def test_cognito_custom_message_trigger_event(): + event = CustomMessageTriggerEvent(load_event("cognitoCustomMessageEvent.json")) + + assert event.request.code_parameter == "####" + assert event.request.username_parameter == "username" + assert event.request.user_attributes["phone_number_verified"] is False + assert event.request.client_metadata is None + + event.response.sms_message = "sms" + assert event.response.sms_message == event["response"]["smsMessage"] + event.response.email_message = "email" + assert event.response.email_message == event["response"]["emailMessage"] + event.response.email_subject = "subject" + assert event.response.email_subject == event["response"]["emailSubject"] + + +def test_cognito_pre_authentication_trigger_event(): + event = PreAuthenticationTriggerEvent(load_event("cognitoPreAuthenticationEvent.json")) + + assert event.request.user_not_found is None + event["request"]["userNotFound"] = True + assert event.request.user_not_found is True + assert event.request.user_attributes["email"] == "test@mail.com" + assert event.request.validation_data is None + + +def test_cognito_post_authentication_trigger_event(): + event = PostAuthenticationTriggerEvent(load_event("cognitoPostAuthenticationEvent.json")) + + assert event.request.new_device_used is True + assert event.request.user_attributes["email"] == 
"test@mail.com" + assert event.request.client_metadata is None + + +def test_cognito_pre_token_generation_trigger_event(): + event = PreTokenGenerationTriggerEvent(load_event("cognitoPreTokenGenerationEvent.json")) + + group_configuration = event.request.group_configuration + assert group_configuration.groups_to_override == [] + assert group_configuration.iam_roles_to_override == [] + assert group_configuration.preferred_role is None + assert event.request.user_attributes["email"] == "test@mail.com" + assert event.request.client_metadata is None + + event["request"]["groupConfiguration"]["preferredRole"] = "temp" + group_configuration = event.request.group_configuration + assert group_configuration.preferred_role == "temp" + + assert event["response"].get("claimsOverrideDetails") is None + claims_override_details = event.response.claims_override_details + assert event["response"]["claimsOverrideDetails"] == {} + + assert claims_override_details.claims_to_add_or_override is None + assert claims_override_details.claims_to_suppress is None + assert claims_override_details.group_configuration is None + + claims_override_details.group_configuration = {} + assert claims_override_details.group_configuration._data == {} + assert event["response"]["claimsOverrideDetails"]["groupOverrideDetails"] == {} + + expected_claims = {"test": "value"} + claims_override_details.claims_to_add_or_override = expected_claims + assert claims_override_details.claims_to_add_or_override["test"] == "value" + assert event["response"]["claimsOverrideDetails"]["claimsToAddOrOverride"] == expected_claims + + claims_override_details.claims_to_suppress = ["email"] + assert claims_override_details.claims_to_suppress[0] == "email" + assert event["response"]["claimsOverrideDetails"]["claimsToSuppress"] == ["email"] + + expected_groups = ["group-A", "group-B"] + claims_override_details.set_group_configuration_groups_to_override(expected_groups) + assert claims_override_details.group_configuration.groups_to_override == expected_groups + assert event["response"]["claimsOverrideDetails"]["groupOverrideDetails"]["groupsToOverride"] == expected_groups + + claims_override_details.set_group_configuration_iam_roles_to_override(["role"]) + assert claims_override_details.group_configuration.iam_roles_to_override == ["role"] + assert event["response"]["claimsOverrideDetails"]["groupOverrideDetails"]["iamRolesToOverride"] == ["role"] + + claims_override_details.set_group_configuration_preferred_role("role_name") + assert claims_override_details.group_configuration.preferred_role == "role_name" + assert event["response"]["claimsOverrideDetails"]["groupOverrideDetails"]["preferredRole"] == "role_name" + + +def test_dynamo_db_stream_trigger_event(): + event = DynamoDBStreamEvent(load_event("dynamoStreamEvent.json")) + + records = list(event.records) + + record = records[0] + assert record.aws_region == "us-west-2" + dynamodb = record.dynamodb + assert dynamodb is not None + assert dynamodb.approximate_creation_date_time is None + keys = dynamodb.keys + assert keys is not None + id_key = keys["Id"] + assert id_key.b_value is None + assert id_key.bs_value is None + assert id_key.bool_value is None + assert id_key.list_value is None + assert id_key.map_value is None + assert id_key.n_value == "101" + assert id_key.ns_value is None + assert id_key.null_value is None + assert id_key.s_value is None + assert id_key.ss_value is None + message_key = dynamodb.new_image["Message"] + assert message_key is not None + assert message_key.s_value == "New 
item!" + assert dynamodb.old_image is None + assert dynamodb.sequence_number == "111" + assert dynamodb.size_bytes == 26 + assert dynamodb.stream_view_type == StreamViewType.NEW_AND_OLD_IMAGES + assert record.event_id == "1" + assert record.event_name is DynamoDBRecordEventName.INSERT + assert record.event_source == "aws:dynamodb" + assert record.event_source_arn == "eventsource_arn" + assert record.event_version == "1.0" + assert record.user_identity is None + + +def test_dynamo_attribute_value_list_value(): + example_attribute_value = {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]} + attribute_value = AttributeValue(example_attribute_value) + list_value = attribute_value.list_value + assert list_value is not None + item = list_value[0] + assert item.s_value == "Cookies" + + +def test_dynamo_attribute_value_map_value(): + example_attribute_value = {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}} + + attribute_value = AttributeValue(example_attribute_value) + + map_value = attribute_value.map_value + assert map_value is not None + item = map_value["Name"] + assert item.s_value == "Joe" + + +def test_event_bridge_event(): + event = EventBridgeEvent(load_event("eventBridgeEvent.json")) + + assert event.get_id == event["id"] + assert event.version == event["version"] + assert event.account == event["account"] + assert event.time == event["time"] + assert event.region == event["region"] + assert event.resources == event["resources"] + assert event.source == event["source"] + assert event.detail_type == event["detail-type"] + assert event.detail == event["detail"] + + +def test_s3_trigger_event(): + event = S3Event(load_event("s3Event.json")) + records = list(event.records) + assert len(records) == 1 + record = records[0] + assert record.event_version == "2.1" + assert record.event_source == "aws:s3" + assert record.aws_region == "us-east-2" + assert record.event_time == "2019-09-03T19:37:27.192Z" + assert record.event_name == "ObjectCreated:Put" + user_identity = record.user_identity + assert user_identity.principal_id == "AWS:AIDAINPONIXQXHT3IKHL2" + request_parameters = record.request_parameters + assert request_parameters.source_ip_address == "205.255.255.255" + assert record.response_elements["x-amz-request-id"] == "D82B88E5F771F645" + s3 = record.s3 + assert s3.s3_schema_version == "1.0" + assert s3.configuration_id == "828aa6fc-f7b5-4305-8584-487c791949c1" + bucket = s3.bucket + assert bucket.name == "lambda-artifacts-deafc19498e3f2df" + assert bucket.owner_identity.principal_id == "A3I5XTEXAMAI3E" + assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" + assert s3.get_object.key == "b21b84d653bb07b05b1e6b33684dc11b" + assert s3.get_object.size == 1305107 + assert s3.get_object.etag == "b21b84d653bb07b05b1e6b33684dc11b" + assert s3.get_object.version_id is None + assert s3.get_object.sequencer == "0C0F6F405D6ED209E1" + assert record.glacier_event_data is None + assert event.record._data == event["Records"][0] + assert event.bucket_name == "lambda-artifacts-deafc19498e3f2df" + assert event.object_key == "b21b84d653bb07b05b1e6b33684dc11b" + + +def test_s3_key_unquote_plus(): + tricky_name = "foo name+value" + event_dict = {"Records": [{"s3": {"object": {"key": quote_plus(tricky_name)}}}]} + event = S3Event(event_dict) + assert event.object_key == tricky_name + + +def test_s3_glacier_event(): + example_event = { + "Records": [ + { + "glacierEventData": { + "restoreEventData": { + "lifecycleRestorationExpiryTime": "1970-01-01T00:01:00.000Z", + 
"lifecycleRestoreStorageClass": "standard", + } + } + } + ] + } + event = S3Event(example_event) + record = next(event.records) + glacier_event_data = record.glacier_event_data + assert glacier_event_data is not None + assert glacier_event_data.restore_event_data.lifecycle_restoration_expiry_time == "1970-01-01T00:01:00.000Z" + assert glacier_event_data.restore_event_data.lifecycle_restore_storage_class == "standard" + + +def test_ses_trigger_event(): + event = SESEvent(load_event("sesEvent.json")) + + expected_address = "johndoe@example.com" + records = list(event.records) + record = records[0] + assert record.event_source == "aws:ses" + assert record.event_version == "1.0" + mail = record.ses.mail + assert mail.timestamp == "1970-01-01T00:00:00.000Z" + assert mail.source == "janedoe@example.com" + assert mail.message_id == "o3vrnil0e2ic28tr" + assert mail.destination == [expected_address] + assert mail.headers_truncated is False + headers = list(mail.headers) + assert len(headers) == 10 + assert headers[0].name == "Return-Path" + assert headers[0].value == "" + common_headers = mail.common_headers + assert common_headers.return_path == "janedoe@example.com" + assert common_headers.get_from == common_headers._data["from"] + assert common_headers.date == "Wed, 7 Oct 2015 12:34:56 -0700" + assert common_headers.to == [expected_address] + assert common_headers.message_id == "<0123456789example.com>" + assert common_headers.subject == "Test Subject" + receipt = record.ses.receipt + assert receipt.timestamp == "1970-01-01T00:00:00.000Z" + assert receipt.processing_time_millis == 574 + assert receipt.recipients == [expected_address] + assert receipt.spam_verdict.status == "PASS" + assert receipt.virus_verdict.status == "PASS" + assert receipt.spf_verdict.status == "PASS" + assert receipt.dmarc_verdict.status == "PASS" + action = receipt.action + assert action.get_type == action._data["type"] + assert action.function_arn == action._data["functionArn"] + assert action.invocation_type == action._data["invocationType"] + assert event.record._data == event["Records"][0] + assert event.mail._data == event["Records"][0]["ses"]["mail"] + assert event.receipt._data == event["Records"][0]["ses"]["receipt"] + + +def test_sns_trigger_event(): + event = SNSEvent(load_event("snsEvent.json")) + records = list(event.records) + assert len(records) == 1 + record = records[0] + assert record.event_version == "1.0" + assert record.event_subscription_arn == "arn:aws:sns:us-east-2:123456789012:sns-la ..." + assert record.event_source == "aws:sns" + sns = record.sns + assert sns.signature_version == "1" + assert sns.timestamp == "2019-01-02T12:45:07.000Z" + assert sns.signature == "tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==" + assert sns.signing_cert_url == "https://sns.us-east-2.amazonaws.com/SimpleNotificat ..." + assert sns.message_id == "95df01b4-ee98-5cb9-9903-4c221d41eb5e" + assert sns.message == "Hello from SNS!" + message_attributes = sns.message_attributes + test_message_attribute = message_attributes["Test"] + assert test_message_attribute.get_type == "String" + assert test_message_attribute.value == "TestString" + assert sns.get_type == "Notification" + assert sns.unsubscribe_url == "https://sns.us-east-2.amazonaws.com/?Action=Unsubscri ..." 
+    assert sns.topic_arn == "arn:aws:sns:us-east-2:123456789012:sns-lambda"
+    assert sns.subject == "TestInvoke"
+    assert event.record._data == event["Records"][0]
+    assert event.sns_message == "Hello from SNS!"
+
+
+def test_sqs_trigger_event():
+    event = SQSEvent(load_event("sqsEvent.json"))
+
+    records = list(event.records)
+    record = records[0]
+    attributes = record.attributes
+    message_attributes = record.message_attributes
+    test_attr = message_attributes["testAttr"]
+
+    assert len(records) == 2
+    assert record.message_id == "059f36b4-87a3-44ab-83d2-661975830a7d"
+    assert record.receipt_handle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..."
+    assert record.body == "Test message."
+    assert attributes.aws_trace_header is None
+    assert attributes.approximate_receive_count == "1"
+    assert attributes.sent_timestamp == "1545082649183"
+    assert attributes.sender_id == "AIDAIENQZJOLO23YVJ4VO"
+    assert attributes.approximate_first_receive_timestamp == "1545082649185"
+    assert attributes.sequence_number is None
+    assert attributes.message_group_id is None
+    assert attributes.message_deduplication_id is None
+    assert message_attributes["NotFound"] is None
+    assert message_attributes.get("NotFound") is None
+    assert test_attr.string_value == "100"
+    assert test_attr.binary_value == "base64Str"
+    assert test_attr.data_type == "Number"
+    assert record.md5_of_body == "e4e68fb7bd0e697a0ae8f1bb342846b3"
+    assert record.event_source == "aws:sqs"
+    assert record.event_source_arn == "arn:aws:sqs:us-east-2:123456789012:my-queue"
+    assert record.aws_region == "us-east-2"
+
+
+def test_api_gateway_proxy_event():
+    event = APIGatewayProxyEvent(load_event("apiGatewayProxyEvent.json"))
+
+    assert event.version == event["version"]
+    assert event.resource == event["resource"]
+    assert event.path == event["path"]
+    assert event.http_method == event["httpMethod"]
+    assert event.headers == event["headers"]
+    assert event.multi_value_headers == event["multiValueHeaders"]
+    assert event.query_string_parameters == event["queryStringParameters"]
+    assert event.multi_value_query_string_parameters == event["multiValueQueryStringParameters"]
+
+    request_context = event.request_context
+    assert request_context.account_id == event["requestContext"]["accountId"]
+    assert request_context.api_id == event["requestContext"]["apiId"]
+
+    authorizer = request_context.authorizer
+    assert authorizer.claims is None
+    assert authorizer.scopes is None
+
+    assert request_context.domain_name == event["requestContext"]["domainName"]
+    assert request_context.domain_prefix == event["requestContext"]["domainPrefix"]
+    assert request_context.extended_request_id == event["requestContext"]["extendedRequestId"]
+    assert request_context.http_method == event["requestContext"]["httpMethod"]
+
+    identity = request_context.identity
+    assert identity.access_key == event["requestContext"]["identity"]["accessKey"]
+    assert identity.account_id == event["requestContext"]["identity"]["accountId"]
+    assert identity.caller == event["requestContext"]["identity"]["caller"]
+    assert (
+        identity.cognito_authentication_provider == event["requestContext"]["identity"]["cognitoAuthenticationProvider"]
+    )
+    assert identity.cognito_authentication_type == event["requestContext"]["identity"]["cognitoAuthenticationType"]
+    assert identity.cognito_identity_id == event["requestContext"]["identity"]["cognitoIdentityId"]
+    assert identity.cognito_identity_pool_id == event["requestContext"]["identity"]["cognitoIdentityPoolId"]
+    assert identity.principal_org_id == event["requestContext"]["identity"]["principalOrgId"]
event["requestContext"]["identity"]["principalOrgId"] + assert identity.source_ip == event["requestContext"]["identity"]["sourceIp"] + assert identity.user == event["requestContext"]["identity"]["user"] + assert identity.user_agent == event["requestContext"]["identity"]["userAgent"] + assert identity.user_arn == event["requestContext"]["identity"]["userArn"] + + assert request_context.path == event["requestContext"]["path"] + assert request_context.protocol == event["requestContext"]["protocol"] + assert request_context.request_id == event["requestContext"]["requestId"] + assert request_context.request_time == event["requestContext"]["requestTime"] + assert request_context.request_time_epoch == event["requestContext"]["requestTimeEpoch"] + assert request_context.resource_id == event["requestContext"]["resourceId"] + assert request_context.resource_path == event["requestContext"]["resourcePath"] + assert request_context.stage == event["requestContext"]["stage"] + + assert event.path_parameters == event["pathParameters"] + assert event.stage_variables == event["stageVariables"] + assert event.body == event["body"] + assert event.is_base64_encoded == event["isBase64Encoded"] + + assert request_context.connected_at is None + assert request_context.connection_id is None + assert request_context.event_type is None + assert request_context.message_direction is None + assert request_context.message_id is None + assert request_context.route_key is None + assert identity.api_key is None + assert identity.api_key_id is None + + +def test_api_gateway_proxy_v2_event(): + event = APIGatewayProxyEventV2(load_event("apiGatewayProxyV2Event.json")) + + assert event.version == event["version"] + assert event.route_key == event["routeKey"] + assert event.raw_path == event["rawPath"] + assert event.raw_query_string == event["rawQueryString"] + assert event.cookies == event["cookies"] + assert event.cookies[0] == "cookie1" + assert event.headers == event["headers"] + assert event.query_string_parameters == event["queryStringParameters"] + assert event.query_string_parameters["parameter2"] == "value" + + request_context = event.request_context + assert request_context.account_id == event["requestContext"]["accountId"] + assert request_context.api_id == event["requestContext"]["apiId"] + assert request_context.authorizer.jwt_claim == event["requestContext"]["authorizer"]["jwt"]["claims"] + assert request_context.authorizer.jwt_scopes == event["requestContext"]["authorizer"]["jwt"]["scopes"] + assert request_context.domain_name == event["requestContext"]["domainName"] + assert request_context.domain_prefix == event["requestContext"]["domainPrefix"] + + http = request_context.http + assert http.method == "POST" + assert http.path == "/my/path" + assert http.protocol == "HTTP/1.1" + assert http.source_ip == "IP" + assert http.user_agent == "agent" + + assert request_context.request_id == event["requestContext"]["requestId"] + assert request_context.route_key == event["requestContext"]["routeKey"] + assert request_context.stage == event["requestContext"]["stage"] + assert request_context.time == event["requestContext"]["time"] + assert request_context.time_epoch == event["requestContext"]["timeEpoch"] + + assert event.body == event["body"] + assert event.path_parameters == event["pathParameters"] + assert event.is_base64_encoded == event["isBase64Encoded"] + assert event.stage_variables == event["stageVariables"] + + +def test_base_proxy_event_get_query_string_value(): + default_value = "default" + set_value = 
"value" + + event = BaseProxyEvent({}) + value = event.get_query_string_value("test", default_value) + assert value == default_value + + event._data["queryStringParameters"] = {"test": set_value} + value = event.get_query_string_value("test", default_value) + assert value == set_value + + value = event.get_query_string_value("unknown", default_value) + assert value == default_value + + value = event.get_query_string_value("unknown") + assert value is None + + +def test_base_proxy_event_get_header_value(): + default_value = "default" + set_value = "value" + + event = BaseProxyEvent({"headers": {}}) + value = event.get_header_value("test", default_value) + assert value == default_value + + event._data["headers"] = {"test": set_value} + value = event.get_header_value("test", default_value) + assert value == set_value + + value = event.get_header_value("unknown", default_value) + assert value == default_value + + value = event.get_header_value("unknown") + assert value is None + + +def test_kinesis_stream_event(): + event = KinesisStreamEvent(load_event("kinesisStreamEvent.json")) + + records = list(event.records) + assert len(records) == 2 + record = records[0] + + assert record.aws_region == "us-east-2" + assert record.event_id == "shardId-000000000006:49590338271490256608559692538361571095921575989136588898" + assert record.event_name == "aws:kinesis:record" + assert record.event_source == "aws:kinesis" + assert record.event_source_arn == "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + assert record.event_version == "1.0" + assert record.invoke_identity_arn == "arn:aws:iam::123456789012:role/lambda-role" + + kinesis = record.kinesis + assert kinesis._data["kinesis"] == event["Records"][0]["kinesis"] + + assert kinesis.approximate_arrival_timestamp == 1545084650.987 + assert kinesis.data == event["Records"][0]["kinesis"]["data"] + assert kinesis.kinesis_schema_version == "1.0" + assert kinesis.partition_key == "1" + assert kinesis.sequence_number == "49590338271490256608559692538361571095921575989136588898" + + assert kinesis.data_as_text() == "Hello, this is a test." 
+
+
+def test_kinesis_stream_event_json_data():
+    json_value = {"test": "value"}
+    data = base64.b64encode(bytes(json.dumps(json_value), "utf-8")).decode("utf-8")
+    event = KinesisStreamEvent({"Records": [{"kinesis": {"data": data}}]})
+    assert next(event.records).kinesis.data_as_json() == json_value
+
+
+def test_alb_event():
+    event = ALBEvent(load_event("albEvent.json"))
+    assert event.request_context.elb_target_group_arn == event["requestContext"]["elb"]["targetGroupArn"]
+    assert event.http_method == event["httpMethod"]
+    assert event.path == event["path"]
+    assert event.query_string_parameters == event["queryStringParameters"]
+    assert event.headers == event["headers"]
+    assert event.multi_value_query_string_parameters == event.get("multiValueQueryStringParameters")
+    assert event.multi_value_headers == event.get("multiValueHeaders")
+    assert event.body == event["body"]
+    assert event.is_base64_encoded == event["isBase64Encoded"]
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index 4d092131988..a3d471ab305 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -32,6 +32,14 @@ def metrics() -> List[Dict[str, str]]:
     ]
 
 
+@pytest.fixture
+def metrics_same_name() -> List[Dict[str, str]]:
+    return [
+        {"name": "metric_one", "unit": MetricUnit.Count, "value": 1},
+        {"name": "metric_one", "unit": MetricUnit.Count, "value": 5},
+    ]
+
+
 @pytest.fixture
 def dimension() -> Dict[str, str]:
     return {"name": "test_dimension", "value": "test"}
@@ -485,7 +493,7 @@ def lambda_handler(evt, context):
     output = capture_metrics_output(capsys)
 
     # THEN ColdStart metric and function_name dimension should be logged
-    assert output["ColdStart"] == 1
+    assert output["ColdStart"] == [1.0]
     assert output["function_name"] == "example_fn"
@@ -607,7 +615,7 @@ def lambda_handler(evt, ctx):
 
 def test_serialize_metric_set_metric_definition(metric, dimension, namespace, service, metadata):
     expected_metric_definition = {
-        "single_metric": 1.0,
+        "single_metric": [1.0],
         "_aws": {
             "Timestamp": 1592237875494,
             "CloudWatchMetrics": [
@@ -655,7 +663,7 @@ def lambda_handler(evt, context):
 
     # THEN ColdStart metric and function_name dimension should be logged
     # in a separate EMF blob than the application metrics
-    assert cold_start_blob["ColdStart"] == 1
+    assert cold_start_blob["ColdStart"] == [1.0]
     assert cold_start_blob["function_name"] == "example_fn"
     assert cold_start_blob["service"] == service
@@ -669,5 +677,65 @@ def lambda_handler(evt, context):
 
     # and that application metrics are recorded as normal
     assert custom_metrics_blob["service"] == service
-    assert custom_metrics_blob["single_metric"] == metric["value"]
+    assert custom_metrics_blob["single_metric"] == [float(metric["value"])]
     assert custom_metrics_blob["test_dimension"] == dimension["value"]
+
+
+def test_log_multiple_metrics(capsys, metrics_same_name, dimensions, namespace):
+    # GIVEN Metrics is initialized
+    my_metrics = Metrics(namespace=namespace)
+
+    for dimension in dimensions:
+        my_metrics.add_dimension(**dimension)
+
+    # WHEN we utilize log_metrics to serialize
+    # and flush multiple metrics with the same name at the end of a function execution
+    @my_metrics.log_metrics
+    def lambda_handler(evt, ctx):
+        for metric in metrics_same_name:
+            my_metrics.add_metric(**metric)
+
+    lambda_handler({}, {})
+    output = capture_metrics_output(capsys)
+    expected = serialize_metrics(metrics=metrics_same_name, dimensions=dimensions, namespace=namespace)
+
+    # THEN we should have no exceptions
+    # and a valid EMF object should be flushed correctly
+    remove_timestamp(metrics=[output, expected])
+    assert expected == output
+
+
+def test_serialize_metric_set_metric_definition_multiple_values(
+    metrics_same_name, dimension, namespace, service, metadata
+):
+    expected_metric_definition = {
+        "metric_one": [1.0, 5.0],
+        "_aws": {
+            "Timestamp": 1592237875494,
+            "CloudWatchMetrics": [
+                {
+                    "Namespace": "test_namespace",
+                    "Dimensions": [["test_dimension", "service"]],
+                    "Metrics": [{"Name": "metric_one", "Unit": "Count"}],
+                }
+            ],
+        },
+        "service": "test_service",
+        "username": "test",
+        "test_dimension": "test",
+    }
+
+    # GIVEN Metrics is initialized and multiple metrics are added with the same name
+    my_metrics = Metrics(service=service, namespace=namespace)
+    for metric in metrics_same_name:
+        my_metrics.add_metric(**metric)
+    my_metrics.add_dimension(**dimension)
+    my_metrics.add_metadata(**metadata)
+
+    # WHEN metrics are serialized manually
+    metric_definition_output = my_metrics.serialize_metric_set()
+
+    # THEN we should emit a valid embedded metric definition object
+    assert "Timestamp" in metric_definition_output["_aws"]
+    remove_timestamp(metrics=[metric_definition_output, expected_metric_definition])
+    assert metric_definition_output == expected_metric_definition
diff --git a/tests/functional/test_tracing.py b/tests/functional/test_tracing.py
index 59b93789907..577d37aeffb 100644
--- a/tests/functional/test_tracing.py
+++ b/tests/functional/test_tracing.py
@@ -61,6 +61,20 @@ def handler(event, context):
     handler({}, {})
 
 
+def test_tracer_chalice_cli_mode(monkeypatch, dummy_response):
+    # GIVEN tracer runs locally
+    monkeypatch.setenv("AWS_CHALICE_CLI_MODE", "true")
+    tracer = Tracer()
+
+    # WHEN a lambda function is run through the Chalice CLI.
+    @tracer.capture_lambda_handler
+    def handler(event, context):
+        return dummy_response
+
+    # THEN tracer should run in disabled mode, and not raise an Exception
+    handler({}, {})
+
+
 def test_tracer_metadata_disabled(dummy_response):
     # GIVEN tracer is disabled, and annotations/metadata are used
     tracer = Tracer(disabled=True)
diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py
index f56a172637a..234f90e9f16 100644
--- a/tests/functional/test_utilities_batch.py
+++ b/tests/functional/test_utilities_batch.py
@@ -275,3 +275,18 @@ def lambda_handler(event, context):
         stubber.assert_no_pending_responses()
 
     assert result is True
+
+
+def test_partial_sqs_processor_context_only_failure(sqs_event_factory, record_handler, partial_processor):
+    """
+    Test processor with only failures
+    """
+    first_record = sqs_event_factory("fail")
+    second_record = sqs_event_factory("fail")
+
+    records = [first_record, second_record]
+    with pytest.raises(SQSBatchProcessingError) as error:
+        with partial_processor(records, record_handler) as ctx:
+            ctx.process()
+
+    assert len(error.value.args[0]) == 2
diff --git a/tests/functional/validator/__init__.py b/tests/functional/validator/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/functional/validator/conftest.py b/tests/functional/validator/conftest.py
new file mode 100644
index 00000000000..5c154b5aab4
--- /dev/null
+++ b/tests/functional/validator/conftest.py
@@ -0,0 +1,358 @@
+import json
+
+import pytest
+
+
+@pytest.fixture
+def schema():
+    return {
+        "$schema": "http://json-schema.org/draft-07/schema",
+        "$id": "http://example.com/example.json",
+        "type": "object",
+        "title": "Sample schema",
"description": "The root schema comprises the entire JSON document.", + "examples": [{"message": "hello world", "username": "lessa"}], + "required": ["message", "username"], + "properties": { + "message": { + "$id": "#/properties/message", + "type": "string", + "title": "The message", + "examples": ["hello world"], + }, + "username": { + "$id": "#/properties/username", + "type": "string", + "title": "The username", + "examples": ["lessa"], + }, + }, + } + + +@pytest.fixture +def schema_array(): + return { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "http://example.com/example.json", + "type": "array", + "title": "Sample schema", + "description": "Sample JSON Schema for dummy data in an array", + "examples": [[{"username": "lessa", "message": "hello world"}]], + "additionalItems": True, + "items": { + "$id": "#/items", + "anyOf": [ + { + "$id": "#/items/anyOf/0", + "type": "object", + "description": "Dummy data in an array", + "required": ["message", "username"], + "properties": { + "message": { + "$id": "#/items/anyOf/0/properties/message", + "type": "string", + "title": "The message", + "examples": ["hello world"], + }, + "username": { + "$id": "#/items/anyOf/0/properties/usernam", + "type": "string", + "title": "The username", + "examples": ["lessa"], + }, + }, + } + ], + }, + } + + +@pytest.fixture +def schema_response(): + return { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "http://example.com/example.json", + "type": "object", + "title": "Sample outgoing schema", + "description": "The root schema comprises the entire JSON document.", + "examples": [{"statusCode": 200, "body": "response"}], + "required": ["statusCode", "body"], + "properties": { + "statusCode": {"$id": "#/properties/statusCode", "type": "integer", "title": "The statusCode"}, + "body": {"$id": "#/properties/body", "type": "string", "title": "The response"}, + }, + } + + +@pytest.fixture +def raw_event(): + return {"message": "hello hello", "username": "blah blah"} + + +@pytest.fixture +def wrapped_event(): + return {"data": {"payload": {"message": "hello hello", "username": "blah blah"}}} + + +@pytest.fixture +def wrapped_event_json_string(): + return {"data": json.dumps({"payload": {"message": "hello hello", "username": "blah blah"}})} + + +@pytest.fixture +def wrapped_event_base64_json_string(): + return {"data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9="} + + +@pytest.fixture +def raw_response(): + return {"statusCode": 200, "body": "response"} + + +@pytest.fixture +def apigateway_event(): + return { + "body": '{"message": "hello world", "username": "lessa"}', + "resource": "/{proxy+}", + "path": "/path/to/resource", + "httpMethod": "POST", + "isBase64Encoded": True, + "queryStringParameters": {"foo": "bar"}, + "multiValueQueryStringParameters": {"foo": ["bar"]}, + "pathParameters": {"proxy": "/path/to/resource"}, + "stageVariables": {"baz": "qux"}, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + "CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": 
"1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + "X-Forwarded-Proto": "https", + }, + "multiValueHeaders": { + "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"], + "Accept-Encoding": ["gzip, deflate, sdch"], + "Accept-Language": ["en-US,en;q=0.8"], + "Cache-Control": ["max-age=0"], + "CloudFront-Forwarded-Proto": ["https"], + "CloudFront-Is-Desktop-Viewer": ["true"], + "CloudFront-Is-Mobile-Viewer": ["false"], + "CloudFront-Is-SmartTV-Viewer": ["false"], + "CloudFront-Is-Tablet-Viewer": ["false"], + "CloudFront-Viewer-Country": ["US"], + "Host": ["0123456789.execute-api.us-east-1.amazonaws.com"], + "Upgrade-Insecure-Requests": ["1"], + "User-Agent": ["Custom User Agent String"], + "Via": ["1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)"], + "X-Amz-Cf-Id": ["cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA=="], + "X-Forwarded-For": ["127.0.0.1, 127.0.0.2"], + "X-Forwarded-Port": ["443"], + "X-Forwarded-Proto": ["https"], + }, + "requestContext": { + "accountId": "123456789012", + "resourceId": "123456", + "stage": "prod", + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "requestTime": "09/Apr/2015:12:34:56 +0000", + "requestTimeEpoch": 1428582896000, + "path": "/prod/path/to/resource", + "resourcePath": "/{proxy+}", + "httpMethod": "POST", + "apiId": "1234567890", + "protocol": "HTTP/1.1", + }, + } + + +@pytest.fixture +def sns_event(): + return { + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": "arn:aws:sns:us-east-1::ExampleTopic", + "Sns": { + "Type": "Notification", + "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:ExampleTopic", + "Subject": "example subject", + "Message": '{"message": "hello world", "username": "lessa"}', + "Timestamp": "1970-01-01T00:00:00.000Z", + "SignatureVersion": "1", + "Signature": "EXAMPLE", + "SigningCertUrl": "EXAMPLE", + "UnsubscribeUrl": "EXAMPLE", + "MessageAttributes": { + "Test": {"Type": "String", "Value": "TestString"}, + "TestBinary": {"Type": "Binary", "Value": "TestBinary"}, + }, + }, + } + ] + } + + +@pytest.fixture +def kinesis_event(): + return { + "Records": [ + { + "kinesis": { + "partitionKey": "partitionKey-03", + "kinesisSchemaVersion": "1.0", + "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9=", + "sequenceNumber": "49545115243490985018280067714973144582180062593244200961", + "approximateArrivalTimestamp": 1428537600.0, + }, + "eventSource": "aws:kinesis", + "eventID": "shardId-000000000000:49545115243490985018280067714973144582180062593244200961", + "invokeIdentityArn": "arn:aws:iam::EXAMPLE", + "eventVersion": "1.0", + "eventName": "aws:kinesis:record", + "eventSourceARN": "arn:aws:kinesis:EXAMPLE", + "awsRegion": "us-east-1", + } + ] + } + + +@pytest.fixture +def eventbridge_event(): + return { + "id": "cdc73f9d-aea9-11e3-9d5a-835b769c0d9c", + "detail-type": "Scheduled Event", + "source": "aws.events", + "account": "123456789012", + "time": "1970-01-01T00:00:00Z", + "region": "us-east-1", + "resources": ["arn:aws:events:us-east-1:123456789012:rule/ExampleRule"], + "detail": {"message": "hello hello", "username": "blah blah"}, + } + 
+
+@pytest.fixture
+def sqs_event():
+    return {
+        "Records": [
+            {
+                "messageId": "19dd0b57-b21e-4ac1-bd88-01bbb068cb78",
+                "receiptHandle": "MessageReceiptHandle",
+                "body": '{"message": "hello world", "username": "lessa"}',
+                "attributes": {
+                    "ApproximateReceiveCount": "1",
+                    "SentTimestamp": "1523232000000",
+                    "SenderId": "123456789012",
+                    "ApproximateFirstReceiveTimestamp": "1523232000001",
+                },
+                "messageAttributes": {},
+                "md5OfBody": "7b270e59b47ff90a553787216d55d91d",
+                "eventSource": "aws:sqs",
+                "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:MyQueue",
+                "awsRegion": "us-east-1",
+            },
+        ]
+    }
+
+
+@pytest.fixture
+def cloudwatch_logs_event():
+    return {
+        "awslogs": {
+            "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA=="  # noqa: E501
+        }
+    }
+
+
+@pytest.fixture
+def cloudwatch_logs_schema():
+    return {
+        "$schema": "http://json-schema.org/draft-07/schema",
+        "$id": "http://example.com/example.json",
+        "type": "array",
+        "title": "Sample schema",
+        "description": "Sample JSON Schema for CloudWatch Logs logEvents using structured dummy data",
+        "examples": [
+            [
+                {
+                    "id": "eventId1",
+                    "message": {"username": "lessa", "message": "hello world"},
+                    "timestamp": 1440442987000,
+                },
+                {
+                    "id": "eventId2",
+                    "message": {"username": "dummy", "message": "hello world"},
+                    "timestamp": 1440442987001,
+                },
+            ]
+        ],
+        "additionalItems": True,
+        "items": {
+            "$id": "#/items",
+            "anyOf": [
+                {
+                    "$id": "#/items/anyOf/0",
+                    "type": "object",
+                    "title": "The first anyOf schema",
+                    "description": "Actual log data found in CloudWatch Logs logEvents key",
+                    "required": ["id", "message", "timestamp"],
+                    "properties": {
+                        "id": {
+                            "$id": "#/items/anyOf/0/properties/id",
+                            "type": "string",
+                            "title": "The id schema",
+                            "description": "Unique identifier for log event",
+                            "default": "",
+                            "examples": ["eventId1"],
+                        },
+                        "message": {
+                            "$id": "#/items/anyOf/0/properties/message",
+                            "type": "object",
+                            "title": "The message schema",
+                            "description": "Log data captured in CloudWatch Logs",
+                            "default": {},
+                            "examples": [{"username": "lessa", "message": "hello world"}],
+                            "required": ["username", "message"],
+                            "properties": {
+                                "username": {
+                                    "$id": "#/items/anyOf/0/properties/message/properties/username",
+                                    "type": "string",
+                                    "title": "The username",
+                                    "examples": ["lessa"],
+                                },
+                                "message": {
+                                    "$id": "#/items/anyOf/0/properties/message/properties/message",
+                                    "type": "string",
+                                    "title": "The message",
+                                    "examples": ["hello world"],
+                                },
+                            },
+                            "additionalProperties": True,
+                        },
+                        "timestamp": {
+                            "$id": "#/items/anyOf/0/properties/timestamp",
+                            "type": "integer",
+                            "title": "The timestamp schema",
+                            "description": "Log event epoch timestamp in milliseconds",
+                            "default": 0,
+                            "examples": [1440442987000],
+                        },
+                    },
+                }
+            ],
+        },
+    }
diff --git a/tests/functional/validator/test_validator.py b/tests/functional/validator/test_validator.py
new file mode 100644
index 00000000000..c0e12792e73
--- /dev/null
+++ b/tests/functional/validator/test_validator.py
@@ -0,0 +1,131 @@
+import jmespath
+import pytest
+from jmespath import functions
+
+from aws_lambda_powertools.utilities.validation import envelopes, exceptions, validate, validator
+
+
+def test_validate_raw_event(schema, raw_event):
+    validate(event=raw_event, schema=schema)
+
+
+def test_validate_wrapped_event_raw_envelope(schema, wrapped_event):
+    validate(event=wrapped_event, schema=schema, envelope="data.payload")
+
+
+def test_validate_json_string_envelope(schema, wrapped_event_json_string):
+    validate(event=wrapped_event_json_string, schema=schema, envelope="powertools_json(data).payload")
+
+
+def test_validate_base64_string_envelope(schema, wrapped_event_base64_json_string):
+    validate(event=wrapped_event_base64_json_string, schema=schema, envelope="powertools_json(powertools_base64(data))")
+
+
+def test_validate_event_does_not_conform_with_schema(schema):
+    with pytest.raises(exceptions.SchemaValidationError):
+        validate(event={"message": "hello_world"}, schema=schema)
+
+
+def test_validate_json_string_no_envelope(schema, wrapped_event_json_string):
+    # WHEN data key contains a JSON String
+    with pytest.raises(exceptions.SchemaValidationError, match=".*data must be object"):
+        validate(event=wrapped_event_json_string, schema=schema, envelope="data.payload")
+
+
+def test_validate_invalid_schema_format(raw_event):
+    with pytest.raises(exceptions.InvalidSchemaFormatError):
+        validate(event=raw_event, schema="schema.json")
+
+
+def test_validate_invalid_envelope_expression(schema, wrapped_event):
+    with pytest.raises(exceptions.InvalidEnvelopeExpressionError):
+        validate(event=wrapped_event, schema=schema, envelope=True)
+
+
+def test_validate_invalid_event(schema):
+    b64_event = "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9="
+    with pytest.raises(exceptions.SchemaValidationError):
+        validate(event=b64_event, schema=schema)
+
+
+def test_apigateway_envelope(schema, apigateway_event):
+    # Payload v1 and v2 remain consistent: the payload itself lives under (body)
+    validate(event=apigateway_event, schema=schema, envelope=envelopes.API_GATEWAY_REST)
+    validate(event=apigateway_event, schema=schema, envelope=envelopes.API_GATEWAY_HTTP)
+
+
+def test_sqs_envelope(sqs_event, schema_array):
+    validate(event=sqs_event, schema=schema_array, envelope=envelopes.SQS)
+
+
+def test_sns_envelope(schema, sns_event):
+    validate(event=sns_event, schema=schema, envelope=envelopes.SNS)
+
+
+def test_eventbridge_envelope(schema, eventbridge_event):
+    validate(event=eventbridge_event, schema=schema, envelope=envelopes.EVENTBRIDGE)
+
+
+def test_kinesis_data_stream_envelope(schema_array, kinesis_event):
+    validate(event=kinesis_event, schema=schema_array, envelope=envelopes.KINESIS_DATA_STREAM)
+
+
+def test_cloudwatch_logs_envelope(cloudwatch_logs_schema, cloudwatch_logs_event):
+    validate(event=cloudwatch_logs_event, schema=cloudwatch_logs_schema, envelope=envelopes.CLOUDWATCH_LOGS)
+
+
+def test_validator_incoming(schema, raw_event):
+    @validator(inbound_schema=schema)
+    def lambda_handler(evt, context):
+        pass
+
+    lambda_handler(raw_event, {})
+
+
+def test_validator_incoming_envelope(schema, apigateway_event):
+    @validator(inbound_schema=schema, envelope=envelopes.API_GATEWAY_REST)
+    def lambda_handler(evt, context):
+        pass
+
+    lambda_handler(apigateway_event, {})
+
+
+def test_validator_outgoing(schema_response, raw_response):
+    @validator(outbound_schema=schema_response)
+    def lambda_handler(evt, context):
+        return raw_response
+
+    lambda_handler({}, {})
+
+
+def test_validator_incoming_and_outgoing(schema, schema_response, raw_event, raw_response):
+    @validator(inbound_schema=schema, outbound_schema=schema_response)
+    def lambda_handler(evt, context):
+        return raw_response
+
+    lambda_handler(raw_event, {})
+
+
+def test_validator_propagates_exception(schema, raw_event, schema_response):
+    @validator(inbound_schema=schema, outbound_schema=schema_response)
+    def lambda_handler(evt, context):
+        raise ValueError("Bubble up")
+
+    with pytest.raises(ValueError):
+        lambda_handler(raw_event, {})
+
+
+def test_custom_jmespath_function_overrides_builtin_functions(schema, wrapped_event_json_string):
+    class CustomFunctions(functions.Functions):
+        @functions.signature({"types": ["string"]})
+        def _func_echo_decoder(self, value):
+            return value
+
+    jmespath_opts = {"custom_functions": CustomFunctions()}
+    with pytest.raises(jmespath.exceptions.UnknownFunctionError, match="Unknown function: powertools_json()"):
+        validate(
+            event=wrapped_event_json_string,
+            schema=schema,
+            envelope="powertools_json(data).payload",
+            jmespath_options=jmespath_opts,
+        )