diff --git a/backend/Dockerfile b/backend/Dockerfile
index 123d85fe3..4dabfe02a 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -21,15 +21,11 @@
 COPY ./manage.py ./
 FROM build AS development
 COPY ./pytest.ini ./.coveragerc ./
 COPY ./scripts/start_backend.dev.sh ./scripts/start_backend.dev.sh
-COPY ./scripts/start_consumer.sh ./scripts/start_consumer.sh
 RUN chmod +x ./scripts/start_backend.dev.sh
-RUN chmod +x ./scripts/start_consumer.sh
 # ---------------------------------------
 # Production
 # ---------------------------------------
 FROM build AS production
 COPY ./scripts/start_backend.prod.sh ./scripts/start_backend.prod.sh
-COPY ./scripts/start_consumer.sh ./scripts/start_consumer.sh
 RUN chmod +x ./scripts/start_backend.prod.sh
-RUN chmod +x ./scripts/start_consumer.sh
diff --git a/backend/Pipfile b/backend/Pipfile
index 07c2bbefb..702c3478c 100644
--- a/backend/Pipfile
+++ b/backend/Pipfile
@@ -43,7 +43,6 @@ numpy = "*"
 pytz = "*"
 django-activity-stream = "*"
 kafka-python = "*"
-kafka-helper = "*"
 cronitor = "*"
 svix = "*"
 pyjwt = "*"
diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock
index 0e4584c2d..179502aa7 100644
--- a/backend/Pipfile.lock
+++ b/backend/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "2c9972797332615a0d9be5a75eec34b04cdc2e6711194fd2f745ff4e811a7aab"
+            "sha256": "d7cad7dfd4dffb7df758d2306e1311c078d29fa618660609ad3a34ce9f8114a5"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -89,19 +89,19 @@
         },
         "boto3": {
             "hashes": [
-                "sha256:19762b6a1adbe1963e26b8280211ca148017c970a2e1386312a9fc8a0a17dbd5",
-                "sha256:367a73c1ff04517849d8c4177fd775da2e258a3912ff6a497be258c30f509046"
+                "sha256:983ec9e539431c29b5265e435b91af7c0d77a75809e173427798edb4ede1d69c",
+                "sha256:f35a42c6d0130a75e58485efa94383256d9b8c72c3a31ad872807873a8800363"
             ],
             "index": "pypi",
-            "version": "==1.26.97"
+            "version": "==1.26.98"
         },
         "botocore": {
             "hashes": [
-                "sha256:0df677eb2bef3ba18ac69e007633559b4426df310eee99df9882437b5faf498a",
-                "sha256:176740221714c0f031c2cd773879df096dbc0f977c63b3e2ed6a956205f02e82"
+                "sha256:ae906c1feb56063a38ffd2280232fa44d634057825470d3beed274925088cb42",
+                "sha256:b74283ff71eb4e57edfa5cf6dc36d959b1eec618a2b1e5e781643184857dd1c4"
             ],
             "markers": "python_version >= '3.7'",
-            "version": "==1.29.97"
+            "version": "==1.29.98"
         },
         "braintree": {
             "hashes": [
@@ -334,32 +334,28 @@
         },
         "cryptography": {
             "hashes": [
-                "sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1",
-                "sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7",
-                "sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06",
-                "sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84",
-                "sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915",
-                "sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074",
-                "sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5",
-                "sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3",
-                "sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9",
-                "sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3",
-                "sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011",
-                "sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536",
-                "sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a",
-                "sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f",
-                "sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480",
-                "sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac",
-                "sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0",
-                "sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108",
-                "sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828",
-                "sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354",
-                "sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612",
-                "sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3",
-                "sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97"
+                "sha256:14da8c26755ffa5c7863ffa5e8b87cb9596a21b6c34852cb19e0f48c226c64fb",
+                "sha256:168ded448fb5d82dfa911156ab8b13b1716de65bd50ff977f4657643f998fa05",
+                "sha256:22e63fb48e2615cfab5a9c4bb457d35e7ae03ea8593996bfbe257e78244d12d0",
+                "sha256:23c42c59c2b5b9ddc6a85b5c46b8fabc4d63a1714f4dbea4bf20d25690bf2365",
+                "sha256:34f502619964210939bb7ee7cd5df53178534eb08d3526f941695a8f7aa0efe4",
+                "sha256:43089be365c0ca4235c6e4e781f3bc125bc1fff576c9dd22cdfb585309b9bb9d",
+                "sha256:6b36e2864e04c82634879c7e7aad48824b1847fdb06b64cd410d2ec5e51d1b31",
+                "sha256:7162ae4530958114ca2eee30a56eca46527def33493f622f059dc2e825fd0913",
+                "sha256:71cb346b9dd1537102e7466a2d629385b01847f8d96cd7405f0e717d91cebc8e",
+                "sha256:722cfddae79684166840be2cbbae154f44a455519e644b60bf274a50ccb834db",
+                "sha256:754dc5ab648113dc54197f242db43234a04e4d61193fb5d3ebb42bd569dca571",
+                "sha256:7cc9fc3ffcb766c313ed0515d77d0deabb4f36bdcff3a9f115c43e5ec611b82a",
+                "sha256:b05c9f25a1ea42e427085230815bbdebe15a53bb6163c4c06022e5630645046b",
+                "sha256:e5855a80c77565fe2464e88e0095764e25d8ddb2d24df2b1d31773e80be94435",
+                "sha256:e917a07094217edeefe8f6ea960b45d7aab650b982e4209da078332cc9d3ac3a",
+                "sha256:f2c4134d29cdce0735c16abf48fa8435f001a7b0031e68dd9a9ee1c80a29374a",
+                "sha256:f421f6777592eb199ca8abac7c20b9ecef27c50ad63546e6c614b29771b46d0d",
+                "sha256:fafa997b9e6818db333ded4b379f5b7679b48bd88ac878428cea2a1aa6e79fd8",
+                "sha256:fba36ec552794a06a07ac8bdc5ad83a587f6959d98547f373d401975d55c7c9e"
             ],
             "markers": "python_version >= '3.6'",
-            "version": "==39.0.2"
+            "version": "==40.0.0"
         },
         "decorator": {
             "hashes": [
@@ -590,19 +586,19 @@
         },
         "faker": {
             "hashes": [
-                "sha256:2deeee8fed3d1b8ae5f87d172d4569ddc859aab8693f7cd68eddc5d20400563a",
-                "sha256:e7c058e1f360f245f265625b32d3189d7229398ad80a8b6bac459891745de052"
+                "sha256:4c98c42984db54be2246d40e6407cd983db7b1511a70eaff64c3f383a51bace6",
+                "sha256:9bd71833146b844d848791b79720c7806108130c9603c7074123b3f77b4e97a1"
             ],
             "index": "pypi",
-            "version": "==18.3.0"
+            "version": "==18.3.1"
         },
         "filelock": {
             "hashes": [
-                "sha256:75997740323c5f12e18f10b494bc11c03e42843129f980f17c04352cc7b09d40",
-                "sha256:eb8f0f2d37ed68223ea63e3bddf2fac99667e4362c88b3f762e434d160190d18"
+                "sha256:6d332dc5c896f18ba93a21d987155e97c434a96d3fe4042ca70d0b3b46e3b470",
+                "sha256:9fc1734dbddcdcd4aaa02c160dd94db5272b92dfa859b44ec8df28e160b751f0"
             ],
             "markers": "python_version >= '3.7'",
-            "version": "==3.10.2"
+            "version": "==3.10.4"
         },
         "frozendict": {
             "hashes": [
@@ -756,14 +752,6 @@
             "markers": "python_version >= '3.8'",
             "version": "==2023.3.6"
         },
-        "kafka-helper": {
-            "hashes": [
-                "sha256:82bf53ed817f665cfc16b028e770eade657e1d457a7cc80977dd847baa892071",
-                "sha256:9d774ac0beafa9399d0f98c79ca48d1e103fd53361b0211cd8f8ddc7dcd955b1"
-            ],
-            "index": "pypi",
-            "version": "==0.2"
-        },
         "kafka-python": {
             "hashes": [
                 "sha256:04dfe7fea2b63726cd6f3e79a2d86e709d608d74406638c5da33a01d45a9d7e3",
@@ -1212,11 +1200,11 @@
         },
         "referencing": {
             "hashes": [
- "sha256:6ed80da91adc930de951c174fb3e5514b7d04276af54449c6b34100fda60c5c3", - "sha256:954bf1cf0d8bbbd95d3f507134c1748fe56cd88a870cab5c3c49fedcb3e198ee" + "sha256:2270377c40cae8a719c61405facdb6f795a7c815648d56537c7e19719ce59c37", + "sha256:5ed6b689659894a682fcf7cefd22d8f4a6b5dc716a412f33d25f327e7d1a7841" ], "markers": "python_version >= '3.8'", - "version": "==0.25.2" + "version": "==0.25.3" }, "reportlab": { "hashes": [ @@ -1368,25 +1356,26 @@ }, "ruff": { "hashes": [ - "sha256:05f7c26e60abb5fd0a8acad41e97dfaa0bafef978ef6c2406010267ef3106e1f", - "sha256:180dd0bc1b228532eb899259f72b84779ef10fdaa0fe3695bc28611266db1036", - "sha256:2bc3f6e7dc0ff0c916573f36aa54b671036951959f14e56956fb9bd348935ac0", - "sha256:4b442b55981ffe8a826b6b7d31c76f93f693730f66c0948555f341ebe1e72249", - "sha256:5b2e48a5c577605ec60f6384d8733a5d3e183109786ba2b60de2520d42594680", - "sha256:76cba6d3991a488ac5eef0efa30937603ce312bb71d5e05a517819e95c1837fe", - "sha256:7a2f74f9239407e4a88846f50142045cb2bead701cc4edae196367ce2e498e85", - "sha256:7cef73434774a43b19c40f228dae5837d9530b08bb3b7fd3233c695572553f19", - "sha256:7e20d7209c9e51f2d6fe19e04ed674331e5fed56b819442f6ecab4a78c15890a", - "sha256:7ecdfc38cc77fbc28cb875fe67107771ccee5fc414ccbb681f78c0ea76cb112a", - "sha256:8680dfa67946c8d8e2f17d68e44e2da9e3a1a12435cacb8c871653ed81a6030f", - "sha256:92205eb9ca54b30d89c34ba5e370961389cfde0c9ec6d4736ba0cce2bb83c150", - "sha256:b4d0de61317188f2e25875c41fcd5c8dba20252730d8543e43c747e9b54cf3ff", - "sha256:cc9d8f7831a59d957274e24645fa57acebda1280878c2fa8e46a8c3f6284a727", - "sha256:e8bab2372605a70ae94f8a19207e2b78df12b5a2ddb45890d6073913addd5aca", - "sha256:e8ee0a7ab66c1286fd5943dc9024eb76377b35dc3380ac378626b37b23a8a802" - ], - "index": "pypi", - "version": "==0.0.258" + "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d", + "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0", + "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456", + "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577", + "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b", + "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e", + "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d", + "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7", + "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9", + "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066", + "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec", + "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8", + "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a", + "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff", + "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9", + "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086", + "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226" + ], + "index": "pypi", + "version": "==0.0.259" }, "s3transfer": { "hashes": [ @@ -1446,10 +1435,10 @@ }, "svix": { "hashes": [ - "sha256:85c70f9d67d2b6a693fc04efd1558fa2776cfbe00c9750e57e5f6862ca2a99a4" + "sha256:e78490b22caf3cc375ad189b9fd7e44997356b5d79fe94ed54e38084f7f1c291" ], "index": "pypi", - "version": "==0.82.0" + "version": "==0.82.1" }, "taxjar": { "hashes": [ @@ -1490,11 +1479,11 @@ }, "tzdata": { "hashes": [ - 
"sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d", - "sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa" + "sha256:905ae9e6744dd9ef5ce94d2aaa2dd00282fee38b670b2133407f23c388f110a1", + "sha256:c3b51b235b07f9f1889089c2264bcbeaaba260a63f89bea09e350ea4205eb95f" ], "markers": "python_version >= '2'", - "version": "==2022.7" + "version": "==2023.2" }, "uritemplate": { "hashes": [ @@ -2672,25 +2661,26 @@ }, "ruff": { "hashes": [ - "sha256:05f7c26e60abb5fd0a8acad41e97dfaa0bafef978ef6c2406010267ef3106e1f", - "sha256:180dd0bc1b228532eb899259f72b84779ef10fdaa0fe3695bc28611266db1036", - "sha256:2bc3f6e7dc0ff0c916573f36aa54b671036951959f14e56956fb9bd348935ac0", - "sha256:4b442b55981ffe8a826b6b7d31c76f93f693730f66c0948555f341ebe1e72249", - "sha256:5b2e48a5c577605ec60f6384d8733a5d3e183109786ba2b60de2520d42594680", - "sha256:76cba6d3991a488ac5eef0efa30937603ce312bb71d5e05a517819e95c1837fe", - "sha256:7a2f74f9239407e4a88846f50142045cb2bead701cc4edae196367ce2e498e85", - "sha256:7cef73434774a43b19c40f228dae5837d9530b08bb3b7fd3233c695572553f19", - "sha256:7e20d7209c9e51f2d6fe19e04ed674331e5fed56b819442f6ecab4a78c15890a", - "sha256:7ecdfc38cc77fbc28cb875fe67107771ccee5fc414ccbb681f78c0ea76cb112a", - "sha256:8680dfa67946c8d8e2f17d68e44e2da9e3a1a12435cacb8c871653ed81a6030f", - "sha256:92205eb9ca54b30d89c34ba5e370961389cfde0c9ec6d4736ba0cce2bb83c150", - "sha256:b4d0de61317188f2e25875c41fcd5c8dba20252730d8543e43c747e9b54cf3ff", - "sha256:cc9d8f7831a59d957274e24645fa57acebda1280878c2fa8e46a8c3f6284a727", - "sha256:e8bab2372605a70ae94f8a19207e2b78df12b5a2ddb45890d6073913addd5aca", - "sha256:e8ee0a7ab66c1286fd5943dc9024eb76377b35dc3380ac378626b37b23a8a802" - ], - "index": "pypi", - "version": "==0.0.258" + "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d", + "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0", + "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456", + "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577", + "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b", + "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e", + "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d", + "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7", + "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9", + "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066", + "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec", + "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8", + "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a", + "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff", + "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9", + "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086", + "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226" + ], + "index": "pypi", + "version": "==0.0.259" }, "setuptools": { "hashes": [ diff --git a/backend/api/serializers/model_serializers.py b/backend/api/serializers/model_serializers.py index 8461368c7..18efb0e54 100644 --- a/backend/api/serializers/model_serializers.py +++ b/backend/api/serializers/model_serializers.py @@ -73,7 +73,11 @@ SVIX_CONNECTOR = settings.SVIX_CONNECTOR logger = logging.getLogger("django.server") -kafka_producer = Producer() +USE_KAFKA = settings.USE_KAFKA +if 
USE_KAFKA: + kafka_producer = Producer() +else: + kafka_producer = None class TagNameSerializer(TimezoneFieldMixin, serializers.ModelSerializer): @@ -1442,7 +1446,7 @@ def update(self, instance, validated_data): "payment_status", instance.payment_status ) instance.save() - if instance.payment_status == Invoice.PaymentStatus.PAID: + if instance.payment_status == Invoice.PaymentStatus.PAID and kafka_producer: kafka_producer.produce_invoice_pay_in_full( invoice=instance, payment_date=now_utc(), source="lotus_out_of_band" ) diff --git a/backend/api/views.py b/backend/api/views.py index 6746e8fa3..8e11a4c7b 100644 --- a/backend/api/views.py +++ b/backend/api/views.py @@ -170,6 +170,12 @@ POSTHOG_PERSON = settings.POSTHOG_PERSON SVIX_CONNECTOR = settings.SVIX_CONNECTOR IDEMPOTENCY_ID_NAMESPACE = settings.IDEMPOTENCY_ID_NAMESPACE +logger = logging.getLogger("django.server") +USE_KAFKA = settings.USE_KAFKA +if USE_KAFKA: + kafka_producer = Producer() +else: + kafka_producer = None logger = logging.getLogger("django.server") @@ -2146,10 +2152,6 @@ def post(self, request, format=None): ) -logger = logging.getLogger("django.server") -kafka_producer = Producer() - - def load_event(request: HttpRequest) -> Optional[dict]: """ Loads an event from the request body. @@ -2263,7 +2265,8 @@ def track_event(request): "organization_id": organization_pk, "event": transformed_event, } - kafka_producer.produce(customer_id, stream_events) + if kafka_producer: + kafka_producer.produce(customer_id, stream_events) except Exception as e: bad_events[idempotency_id] = str(e) continue diff --git a/backend/lotus/settings.py b/backend/lotus/settings.py index a9180054c..f684816d6 100644 --- a/backend/lotus/settings.py +++ b/backend/lotus/settings.py @@ -22,7 +22,6 @@ import dj_database_url import django_heroku import jwt -import kafka_helper import posthog import sentry_sdk from decouple import config @@ -114,6 +113,9 @@ IDEMPOTENCY_ID_NAMESPACE = uuid.UUID("904C0FFB-7005-414E-9B7D-8E3C5DDE266D") # CRM Integration VESSEL_API_KEY = config("VESSEL_API_KEY", default=None) +# Partial startup +USE_WEBHOOKS = not config("NO_WEBHOOKS", default=False, cast=bool) +USE_KAFKA = not config("NO_EVENTS", default=False, cast=bool) if SENTRY_DSN != "": if not DEBUG: @@ -330,7 +332,7 @@ def key_deserializer(key): KAFKA_NUM_PARTITIONS = config("NUM_PARTITIONS", default=10, cast=int) KAFKA_REPLICATION_FACTOR = config("REPLICATION_FACTOR", default=1, cast=int) KAFKA_HOST = config("KAFKA_URL", default="127.0.0.1:9092") -if KAFKA_HOST: +if KAFKA_HOST and USE_KAFKA: if "," not in KAFKA_HOST: KAFKA_HOST = KAFKA_HOST else: @@ -355,18 +357,10 @@ def key_deserializer(key): "client_id": "events-client", } - KAFKA_CERTIFICATE = config("KAFKA_CLIENT_CERT", default=None) - KAFKA_KEY = config("KAFKA_CLIENT_CERT_KEY", default=None) - KAFKA_CA = config("KAFKA_TRUSTED_CERT", default=None) KAFKA_SASL_USERNAME = config("KAFKA_SASL_USERNAME", default=None) KAFKA_SASL_PASSWORD = config("KAFKA_SASL_PASSWORD", default=None) - if KAFKA_CERTIFICATE and KAFKA_KEY and KAFKA_CA: - ssl_context = kafka_helper.get_kafka_ssl_context() - for cfg in [producer_config, consumer_config, admin_client_config]: - cfg["security_protocol"] = "SSL" - cfg["ssl_context"] = ssl_context - elif KAFKA_SASL_USERNAME and KAFKA_SASL_PASSWORD: + if KAFKA_SASL_USERNAME and KAFKA_SASL_PASSWORD: for cfg in [producer_config, consumer_config, admin_client_config]: cfg["security_protocol"] = "SASL_SSL" cfg["sasl_mechanism"] = "SCRAM-SHA-256" @@ -718,28 +712,33 @@ def immutable_file_test(path, url): 
 django_heroku.settings(locals(), logging=False)
 
 # create svix events
-if SVIX_API_KEY != "":
-    svix = Svix(SVIX_API_KEY)
-elif SVIX_API_KEY == "" and SVIX_JWT_SECRET != "":
-    try:
-        dt = datetime.datetime.now(timezone.utc)
-        utc_time = dt.replace(tzinfo=timezone.utc)
-        utc_timestamp = utc_time.timestamp()
-        payload = {
-            "iat": utc_timestamp,
-            "exp": 2980500639,
-            "nbf": utc_timestamp,
-            "iss": "svix-server",
-            "sub": "org_23rb8YdGqMT0qIzpgGwdXfHirMu",
-        }
-        encoded = jwt.encode(payload, SVIX_JWT_SECRET, algorithm="HS256")
-        SVIX_API_KEY = encoded
-        hostname, _, ips = socket.gethostbyname_ex("svix-server")
-        svix = Svix(SVIX_API_KEY, SvixOptions(server_url=f"http://{ips[0]}:8071"))
-    except Exception:
+if USE_WEBHOOKS:
+    if SVIX_API_KEY != "":
+        svix = Svix(SVIX_API_KEY)
+    elif SVIX_API_KEY == "" and SVIX_JWT_SECRET != "":
+        try:
+            dt = datetime.datetime.now(timezone.utc)
+            utc_time = dt.replace(tzinfo=timezone.utc)
+            utc_timestamp = utc_time.timestamp()
+            payload = {
+                "iat": utc_timestamp,
+                "exp": 2980500639,
+                "nbf": utc_timestamp,
+                "iss": "svix-server",
+                "sub": "org_23rb8YdGqMT0qIzpgGwdXfHirMu",
+            }
+            encoded = jwt.encode(payload, SVIX_JWT_SECRET, algorithm="HS256")
+            SVIX_API_KEY = encoded
+            hostname, _, ips = socket.gethostbyname_ex("svix-server")
+            svix = Svix(SVIX_API_KEY, SvixOptions(server_url=f"http://{ips[0]}:8071"))
+        except Exception:
+            logger.error("Error creating svix connector")
+            svix = None
+    else:
+        svix = None
 else:
     svix = None
 
+SVIX_CONNECTOR = svix
 if SVIX_CONNECTOR is not None:
     try:
diff --git a/backend/metering_billing/invoice.py b/backend/metering_billing/invoice.py
index 123464647..ed199c85b 100644
--- a/backend/metering_billing/invoice.py
+++ b/backend/metering_billing/invoice.py
@@ -7,7 +7,6 @@
 from django.conf import settings
 from django.db.models import Q, Sum
 from django.db.models.query import QuerySet
-
 from metering_billing.kafka.producer import Producer
 from metering_billing.payment_processors import PAYMENT_PROCESSOR_MAP
 from metering_billing.taxes import get_lotus_tax_rates, get_taxjar_tax_rates
@@ -32,12 +31,11 @@
 POSTHOG_PERSON = settings.POSTHOG_PERSON
 META = settings.META
 DEBUG = settings.DEBUG
-# LOTUS_HOST = settings.LOTUS_HOST
-# LOTUS_API_KEY = settings.LOTUS_API_KEY
-# if LOTUS_HOST and LOTUS_API_KEY:
-#     lotus_python.api_key = LOTUS_API_KEY
-#     lotus_python.host = LOTUS_HOST
-kafka_producer = Producer()
+USE_KAFKA = settings.USE_KAFKA
+if USE_KAFKA:
+    kafka_producer = Producer()
+else:
+    kafka_producer = None
 
 
 def generate_invoice(
@@ -159,7 +157,8 @@ def generate_invoice(
             sentry_sdk.capture_exception(e)
 
     invoice_created_webhook(invoice, organization)
-    kafka_producer.produce_invoice(invoice)
+    if kafka_producer:
+        kafka_producer.produce_invoice(invoice)
     return_list.append(invoice)
 
     return return_list
@@ -740,7 +739,8 @@ def generate_balance_adjustment_invoice(balance_adjustment, draft=False):
     except Exception as e:
         sentry_sdk.capture_exception(e)
     invoice_created_webhook(invoice, organization)
-    kafka_producer.produce_invoice(invoice)
+    if kafka_producer:
+        kafka_producer.produce_invoice(invoice)
 
     return invoice
diff --git a/backend/metering_billing/views/webhook_views.py b/backend/metering_billing/views/webhook_views.py
index b08b46e67..bddbb27bc 100644
--- a/backend/metering_billing/views/webhook_views.py
+++ b/backend/metering_billing/views/webhook_views.py
@@ -17,7 +17,11 @@
 STRIPE_WEBHOOK_SECRET = settings.STRIPE_WEBHOOK_SECRET
 STRIPE_TEST_SECRET_KEY = settings.STRIPE_TEST_SECRET_KEY
 STRIPE_LIVE_SECRET_KEY = settings.STRIPE_LIVE_SECRET_KEY
-kafka_producer = Producer()
+USE_KAFKA = settings.USE_KAFKA
+if USE_KAFKA:
+    kafka_producer = Producer()
+else:
+    kafka_producer = None
 
 
 def _invoice_paid_handler(event):
@@ -29,11 +33,12 @@
     if matching_invoice:
         matching_invoice.payment_status = Invoice.PaymentStatus.PAID
         matching_invoice.save()
-        kafka_producer.produce_invoice_pay_in_full(
-            invoice=matching_invoice,
-            payment_date=now_utc(),
-            source=PAYMENT_PROCESSORS.STRIPE,
-        )
+        if kafka_producer:
+            kafka_producer.produce_invoice_pay_in_full(
+                invoice=matching_invoice,
+                payment_date=now_utc(),
+                source=PAYMENT_PROCESSORS.STRIPE,
+            )
 
 
 def _invoice_updated_handler(event):
diff --git a/backend/scripts/start_backend.dev.sh b/backend/scripts/start_backend.dev.sh
index 841d4c75a..f92b405b6 100644
--- a/backend/scripts/start_backend.dev.sh
+++ b/backend/scripts/start_backend.dev.sh
@@ -1,5 +1,4 @@
 while ! nc -q 1 db 5432 > "$ENV_FILE"
+else
+    DOCKER_IMAGES+=("event-ingestion:latest" "event-guidance:latest" "docker.redpanda.com/vectorized/redpanda:v22.2.2")
+    SERVICES+=("event-ingestion" "event-guidance" "redpanda")
+fi
+
+# Check if --no-beat flag was passed
+if [[ "$*" == *"--no-beat"* ]]; then
+    echo "Skipping celery beat images/containers 🚀"
+    echo "NO_BEAT=true" >> "$ENV_FILE"
+else
+    DOCKER_IMAGES+=("lotus-celery-beat:latest")
+    SERVICES+=("celery-beat")
+fi
+
+# Check if --no-webhooks flag was passed
+if [[ "$*" == *"--no-webhooks"* ]]; then
+    echo "Skipping svix images/containers 🚀"
+    echo "NO_WEBHOOKS=true" >> "$ENV_FILE"
 else
-    # macOS OSX or Linux
-    FILE=env/.env.dev
-    if [ -f "$FILE" ]; then
-        echo "Reading dev environment variables 🚀"
-        echo "Building and running Docker image! 🚀"
-        docker-compose -f docker-compose.dev.yaml --env-file env/.env.dev up --build
-    else
-        echo "Creating dev environment variables 🚀"
-        cp env/.env.dev.example env/.env.dev
-        echo "env file created."
-        echo "Please consider replacing the .env.dev file content with custom values!"
-        echo "Building and running Docker image! 🚀"
-        docker-compose -f docker-compose.dev.yaml --env-file env/.env.dev up --build
+    DOCKER_IMAGES+=("svix/svix-server:latest")
+    SERVICES+=("svix-server")
+fi
+
+
+
+# Build missing Docker images
+for image in "${DOCKER_IMAGES[@]}"; do
+    if [[ "$(docker image inspect "$image" 2>/dev/null)" == "" ]]; then
+        echo "Building Docker image $image 🚀"
+        docker-compose $DOCKER_COMPOSE_ARGS build "$image"
     fi
+done
+
+# Run Docker images
+echo "Running Docker images 🚀"
+
+# Determine the appropriate options for `docker-compose up`
+DOCKER_COMPOSE_UP_OPTIONS=()
+if [[ "$*" == *"--force-recreate"* ]]; then
+    DOCKER_COMPOSE_UP_OPTIONS+=("--force-recreate")
+fi
+if [[ "$*" == *"--no-build"* ]]; then
+    :
+else
+    DOCKER_COMPOSE_UP_OPTIONS+=("--build")
 fi
+
+# Run Docker images
+echo "Running Docker images 🚀"
+docker-compose $DOCKER_COMPOSE_ARGS up "${DOCKER_COMPOSE_UP_OPTIONS[@]}" "${SERVICES[@]}"
diff --git a/scripts/self-host.sh b/scripts/self-host.sh
index 59d787ab7..8479ed8c4 100755
--- a/scripts/self-host.sh
+++ b/scripts/self-host.sh
@@ -1,70 +1,31 @@
 #!/bin/bash
-# check to see what plarform
-if [[ "$OSTYPE" == "win32" ]] || [[ "$OSTYPE" == "win64" ]]; then
-    # debian or Windows
-    FILE=env/.env.prod
-    if [ -f "$FILE" ]; then
-        echo "Reading prod environment variables 🚀"
-
-        if [[ "$(docker image inspect lotus-frontend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-event_consumer:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-backend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery-beat:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect svix/svix-server:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect redis:7-alpine 2>/dev/null)" == "" ]] && [[ "$(docker image inspect timescale/timescaledb-ha:latest 2>/dev/null)" == "" ]]; then
-
-            echo "Building and running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-
-        else
-            echo "Running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-        fi
-
-    else
+# check to see what platform
-        echo "Creating prod environment variables 🚀"
-        copy env/.env.prod.example env/.env.prod
-        echo "env file created."
-        echo "Please consider replacing the .env.prod file content with custom values!"
-        if [[ "$(docker image inspect lotus-frontend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-event_consumer:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-backend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery-beat:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect svix/svix-server:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect redis:7-alpine 2>/dev/null)" == "" ]] && [[ "$(docker image inspect timescale/timescaledb-ha:latest 2>/dev/null)" == "" ]]; then
-
-            echo "Building and running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-
-        else
-            echo "Running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-
-        fi
-    fi
+# Determine platform-specific commands
+if [[ "$OSTYPE" == "win32" ]] || [[ "$OSTYPE" == "win64" ]]; then
+    # Windows
+    COPY_CMD="copy"
 else
-    # macOS OSX or Linux
-    FILE=env/.env.prod
-    if [ -f "$FILE" ]; then
-        echo "Reading prod environment variables 🚀"
-
-        if [[ "$(docker image inspect lotus-frontend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-event_consumer:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-backend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery-beat:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect svix/svix-server:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect redis:7-alpine 2>/dev/null)" == "" ]] && [[ "$(docker image inspect timescale/timescaledb-ha:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect docker.redpanda.com/vectorized/redpanda:v22.2.2 2>/dev/null)" == "" ]]; then
-            echo "I do not get na"
-            echo "Building and running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-
-        else
-            echo "Running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-        fi
-
-    else
-
-        echo "Creating prod environment variables 🚀"
-        cp env/.env.prod.example env/.env.prod
-        echo "env file created."
-        echo "Please consider replacing the .env.prod file content with custom values!"
-
-        if [[ "$(docker image inspect lotus-frontend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-event_consumer:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-backend:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect lotus-celery-beat:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect svix/svix-server:latest 2>/dev/null)" == "" ]] && [[ "$(docker image inspect redis:7-alpine 2>/dev/null)" == "" ]] && [[ "$(docker image inspect timescale/timescaledb-ha:latest 2>/dev/null)" == "" ]]; then
+    # macOS or Linux
+    COPY_CMD="cp"
+fi
-            echo "Building and running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
+
+# Set Docker images and compose args
+DOCKER_IMAGES=("lotus-frontend:latest" "lotus-celery:latest" "event-ingestion:latest" "event-guidance:latest" "lotus-backend:latest" "lotus-celery-beat:latest" "svix/svix-server:latest" "redis:7-alpine" "timescale/timescaledb-ha:latest" "docker.redpanda.com/vectorized/redpanda:v22.2.2")
+DOCKER_COMPOSE_ARGS="-f docker-compose.prod.yaml --env-file env/.env.prod"
-        else
-            echo "Running Docker image! 🚀"
-            docker-compose -f docker-compose.prod.yaml --env-file env/.env.prod up --build
-        fi
-    fi
+
+# Check if environment variables file exists
+ENV_FILE="env/.env.prod"
+if [ -f "$ENV_FILE" ]; then
+    echo "Reading prod environment variables 🚀"
+else
+    echo "Creating prod environment variables 🚀"
+    $COPY_CMD env/.env.prod.example env/.env.prod
+    echo "env file created."
+    echo "Please consider replacing the .env.prod file content with custom values!"
 fi
+
+# Run Docker images
+echo "Running Docker images 🚀"
+docker-compose $DOCKER_COMPOSE_ARGS up --build
\ No newline at end of file