From 17565578c7f3a5443943ca68e481126825825439 Mon Sep 17 00:00:00 2001
From: Cody Hiar
Date: Thu, 17 Feb 2022 13:57:09 -0700
Subject: Initial commit

---
 .gitignore         |  1 +
 .python-version    |  1 +
 Makefile           | 29 +++++++++++++++++++
 README.md          |  9 ++++++
 bin/build.sh       | 17 +++++++++++
 docker-compose.yml | 21 ++++++++++++++
 requirements.txt   |  1 +
 sqs_sample.py      | 85 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 8 files changed, 164 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 .python-version
 create mode 100644 Makefile
 create mode 100644 README.md
 create mode 100755 bin/build.sh
 create mode 100644 docker-compose.yml
 create mode 100644 requirements.txt
 create mode 100644 sqs_sample.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1d17dae
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.venv
diff --git a/.python-version b/.python-version
new file mode 100644
index 0000000..b04bfd8
--- /dev/null
+++ b/.python-version
@@ -0,0 +1 @@
+3.9.9
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e1a8b92
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,29 @@
+.PHONY: build
+
+# Shortcut for calling compose commands
+DOCKER_COMPOSE = docker-compose -p parsely_localstack
+
+# https://blog.byronjsmith.com/makefile-shortcuts.html
+# This allows us to launch tools without having to `source .venv/bin/activate` first
+VENV = .venv
+export VIRTUAL_ENV := $(abspath ${VENV})
+export PATH := ${VIRTUAL_ENV}/bin:${PATH}
+
+help:
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
+
+build: ## Build the virtual environment
+	bin/build.sh
+
+up: build ## Bring up the environment
+	${DOCKER_COMPOSE} up -d
+
+start: ## Run the sample script
+	awslocal sqs create-queue --queue-name sample-queue
+	python sqs_sample.py
+
+down: ## Shut down external services
+	${DOCKER_COMPOSE} down
+
+clean: down ## Remove the virtual environment and shut down services
+	rm -rf ${VENV}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..5788507
--- /dev/null
+++ b/README.md
@@ -0,0 +1,9 @@
+# Localstack
+
+This is a small toy project that shows how to use `localstack` to mock out AWS
+services locally. To run the project:
+
+```bash
+make up
+make start
+```
diff --git a/bin/build.sh b/bin/build.sh
new file mode 100755
index 0000000..f7fb60e
--- /dev/null
+++ b/bin/build.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# This reads the .python-version file and installs that version if it doesn't
+# already exist. Only install if the pyenv command is present; if we are
+# running in an environment where the host python is already set, like a
+# docker container or a provisioned host, we can skip pyenv.
+[[ -x "$(command -v pyenv)" ]] && pyenv install --skip-existing
+
+# If the virtualenv already exists, exit cleanly
+[[ -d ".venv" ]] && echo "Virtual environment already exists" && exit 0
+
+# Create the virtual env
+python -m venv --prompt localstack .venv
+
+# Install the requirements
+./.venv/bin/pip install -r requirements.txt
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..0db70ac
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,21 @@
+version: '3.8'
+
+services:
+  localstack:
+    container_name: localstack
+    environment:
+      - DEBUG=1
+      - LOCALSTACK_HOSTNAME=localhost
+      - TEST_AWS_ACCOUNT_ID=000000000000
+      - AWS_DEFAULT_REGION=us-west-2
+      - DOCKER_HOST=unix:///var/run/docker.sock
+      - DATA_DIR=/tmp/localstack/data
+      - KINESIS_STREAM_SHARDS=1
+      - KINESIS_ERROR_PROBABILITY=0.0
+      - KINESIS_STREAM_NAME=kinesis-stream
+      - KINESIS_PROVIDER=kinesalite
+    image: localstack/localstack:latest
+    ports:
+      - "4566:4566"
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..434484e
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+awscli-local
diff --git a/sqs_sample.py b/sqs_sample.py
new file mode 100644
index 0000000..d95f361
--- /dev/null
+++ b/sqs_sample.py
@@ -0,0 +1,85 @@
+import os
+import json
+import uuid
+import boto3
+
+# Inside docker, use the docker DNS name `localstack`:
+# os.environ['LOCALSTACK_SQS_ENDPOINT_URL'] = 'http://localstack:4576'
+
+# If you're connecting to localstack from outside docker, use the host DNS name.
+# Each AWS service has its own endpoint URL, so ensure the boto3 client is configured accordingly;
+# you can change endpoint_url to point at any local AWS stack, e.g. a local DynamoDB instance.
+os.environ["LOCALSTACK_SQS_ENDPOINT_URL"] = "http://localhost:4566"
+os.environ["AWS_ACCESS_KEY_ID"] = "foo"
+os.environ["AWS_SECRET_ACCESS_KEY"] = "bar"
+os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
+
+sqs = boto3.client("sqs", endpoint_url=os.environ["LOCALSTACK_SQS_ENDPOINT_URL"])
+
+
+body = {
+    "time": {
+        "updated": "Jul 4, 2020 14:12:00 UTC",
+        "updatedISO": "2020-07-04T14:12:00+00:00",
+        "updateduk": "Jul 4, 2020 at 15:12 BST",
+    },
+    "disclaimer": "This data was produced from the CoinDesk Bitcoin Price Index (USD). Non-USD currency data converted using hourly conversion rate from openexchangerates.org",
+    "bpi": {
+        "USD": {
+            "code": "USD",
+            "rate": "9,083.8632",
+            "description": "United States Dollar",
+            "rate_float": 9083.8632,
+        },
+        "BTC": {
+            "code": "BTC",
+            "rate": "1.0000",
+            "description": "Bitcoin",
+            "rate_float": 1,
+        },
+    },
+}
+
+# Below is typical message sending and receiving with long polling
+print("Sending Message")
+response = sqs.send_message(
+    QueueUrl="http://localhost:4566/000000000000/sample-queue",
+    MessageBody=json.dumps(body),
+    DelaySeconds=3,
+    MessageDeduplicationId=str(uuid.uuid4()),
+    MessageAttributes={
+        "contentType": {"StringValue": "application/json", "DataType": "String"}
+    },
+)
+print(response)
+
+
+print("Waiting for message")
+# WaitTimeSeconds=20 enables long polling, which means fewer read cycles against SQS and lower costs if running in production
+messages = sqs.receive_message(
+    QueueUrl="http://localhost:4566/000000000000/sample-queue",
+    AttributeNames=["All"],
+    MaxNumberOfMessages=10,
+    WaitTimeSeconds=20,
+    VisibilityTimeout=30,
+)
+
+
+messages = messages.get("Messages", [])
+print("Total messages = {}".format(len(messages)))
+for message in messages:
+    message_body = json.loads(message.get("Body"))
+    print(message_body)
+    sqs.delete_message(
+        QueueUrl="http://localhost:4566/000000000000/sample-queue",
+        ReceiptHandle=message.get("ReceiptHandle"),
+    )
+    messages = sqs.receive_message(
+        QueueUrl="http://localhost:4566/000000000000/sample-queue",
+        AttributeNames=["All"],
+        MaxNumberOfMessages=10,
+        WaitTimeSeconds=20,
+        VisibilityTimeout=30,
+    )
+    messages = messages.get("Messages", [])
+    print("Total messages remaining = {}".format(len(messages)))
-- 
cgit v1.2.3
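Once `make up` and `make start` have run, the queue can also be poked directly with `awslocal`. This is a minimal sketch, not part of the commit itself: it assumes the `.venv` built by `bin/build.sh` is on your `PATH` (the Makefile exports this) and that LocalStack is listening on the default edge port 4566; the queue URL shown is the one LocalStack normally returns for account `000000000000`, so prefer whatever `get-queue-url` prints.

```bash
# Confirm the queue created by `make start` is visible to LocalStack
awslocal sqs get-queue-url --queue-name sample-queue

# Push a throwaway message and read it back with a short long-poll
awslocal sqs send-message \
  --queue-url http://localhost:4566/000000000000/sample-queue \
  --message-body '{"hello": "localstack"}'
awslocal sqs receive-message \
  --queue-url http://localhost:4566/000000000000/sample-queue \
  --wait-time-seconds 5
```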