
Commit

Merge pull request #24 from LSSTDESC/resspect_connect
Refactor tests so that pytest -v inside /tests in the docker compose environment works. Also addresses Issue #17
mtauraso authored Nov 22, 2024
2 parents 9160ab7 + 3f46d96 commit aebb2dc
Showing 36 changed files with 1,093 additions and 2,370 deletions.
694 changes: 1 addition & 693 deletions fastdb_get_alerts/load_ppdb_from_alerts.py

Large diffs are not rendered by default.

536 changes: 536 additions & 0 deletions tests/alertcycle_testbase.py

Large diffs are not rendered by default.

511 changes: 0 additions & 511 deletions tests/alertcyclefixtures.py

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions tests/apiclassifier.py
@@ -7,12 +7,12 @@
 import fastavro
 import datetime
 
-from msgconsumer import MsgConsumer
+from testmsgconsumer import MsgConsumer
 from tom_client import TomClient
 
 _rundir = pathlib.Path( __file__ ).parent
 
-_logger = logging.getLogger( __name__ )
+_logger = logging.getLogger( "tests/apiclassifier" )
 _logger.propagate = False
 if not _logger.hasHandlers():
     _logout = logging.StreamHandler( sys.stderr )
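
The rendered hunk cuts off just after the StreamHandler is created. For orientation only, a minimal sketch of the logger-setup pattern these lines imply; this is not part of the commit, and the formatter string and log level are assumptions:

import logging
import sys

_logger = logging.getLogger( "tests/apiclassifier" )
_logger.propagate = False
if not _logger.hasHandlers():
    _logout = logging.StreamHandler( sys.stderr )
    # Assumed formatter and level; the real values lie outside the rendered hunk.
    _logout.setFormatter( logging.Formatter( "[%(asctime)s - %(levelname)s] - %(message)s" ) )
    _logger.addHandler( _logout )
    _logger.setLevel( logging.INFO )
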
153 changes: 139 additions & 14 deletions tests/conftest.py
@@ -8,6 +8,9 @@
 import subprocess
 import pytest
 
+from pymongo import MongoClient
+
+# Make sure the django environment is fully set up
 sys.path.insert( 0, "/tom_desc" )
 os.environ["DJANGO_SETTINGS_MODULE"] = "tom_desc.settings"
 import django
@@ -16,30 +19,152 @@
 import elasticc2.models
 from tom_client import TomClient
 
 # Additional fixtures in other files
 # sys.path.insert( 0, os.getenv("PWD") )
 # pytest_plugins = [ 'alertcyclefixtures' ]
 
 
 @pytest.fixture( scope="session" )
 def tomclient():
     return TomClient( "http://tom:8080", username="root", password="testing" )
 
+@pytest.fixture( scope="session" )
+def mongoclient():
+    host = os.getenv( 'MONGOHOST' )
+    username = os.getenv( 'MONGODB_ALERT_READER' )
+    password = os.getenv( 'MONGODB_ALERT_READER_PASSWORD' )
+    client = MongoClient( f"mongodb://{username}:{password}@{host}:27017/?authSource=alerts" )
+    return client
+
 @pytest.fixture( scope="session" )
 def apibroker_client():
     return TomClient( "http://tom:8080", username="apibroker", password="testing" )
 
-@pytest.fixture( scope="session" )
-def elasticc2_ppdb( tomclient ):
-    basedir = pathlib.Path( "/elasticc2data" )
-    dirs = []
-    for subdir in basedir.glob( '*' ):
-        if subdir.is_dir():
-            result = subprocess.run( [ "python", "manage.py", "load_snana_fits", "-d", str(subdir), "--ppdb", "--do" ],
-                                     cwd="/tom_desc", capture_output=True )
-            assert result.returncode == 0
+def load_elasticc2_database_snapshot( *args ):
+    models = args
+
+    for m in models:
+        assert m.objects.count() == 0
+
+    tables = [ m._meta.db_table for m in models ]
+    args = [ "pg_restore",
+             "--data-only",
+             "-h", "postgres",
+             "-U", "postgres",
+             "-d", "tom_desc" ]
+    for t in tables:
+        args.append( "-t" )
+        args.append( t )
+    args.append( "elasticc2_alertcycle_complete.psqlc" )
+    res = subprocess.run( args, cwd="/tests", env={ "PGPASSWORD": "fragile" }, capture_output=True )
+    assert res.returncode == 0
+
+    return models
+
+
+@pytest.fixture
+def elasticc2_ppdb():
+    models = load_elasticc2_database_snapshot( elasticc2.models.PPDBAlert,
+                                                elasticc2.models.PPDBDiaForcedSource,
+                                                elasticc2.models.PPDBDiaObject,
+                                                elasticc2.models.PPDBDiaSource,
+                                                elasticc2.models.DiaObjectTruth )
+    yield True
+    for m in models:
+        m.objects.all().delete()
+
+
+@pytest.fixture( scope="class" )
+def elasticc2_ppdb_class():
+    models = load_elasticc2_database_snapshot( elasticc2.models.PPDBAlert,
+                                                elasticc2.models.PPDBDiaForcedSource,
+                                                elasticc2.models.PPDBDiaObject,
+                                                elasticc2.models.PPDBDiaSource,
+                                                elasticc2.models.DiaObjectTruth )
+    yield True
+    for m in models:
+        m.objects.all().delete()
+
+
+@pytest.fixture
+def elasticc2_database_snapshot( elasticc2_ppdb ):
+    models = load_elasticc2_database_snapshot( elasticc2.models.BrokerClassifier,
+                                                elasticc2.models.BrokerMessage,
+                                                elasticc2.models.DiaForcedSource,
+                                                elasticc2.models.DiaObject,
+                                                elasticc2.models.DiaSource,
+                                                elasticc2.models.DiaObjectInfo,
+                                                elasticc2.models.BrokerSourceIds )
+    yield True
+    for m in models:
+        m.objects.all().delete()
+
+@pytest.fixture( scope='class' )
+def elasticc2_database_snapshot_class( elasticc2_ppdb_class ):
+    models = load_elasticc2_database_snapshot( elasticc2.models.BrokerClassifier,
+                                                elasticc2.models.BrokerMessage,
+                                                elasticc2.models.DiaForcedSource,
+                                                elasticc2.models.DiaObject,
+                                                elasticc2.models.DiaSource,
+                                                elasticc2.models.DiaObjectInfo,
+                                                elasticc2.models.BrokerSourceIds )
+    yield True
+    for m in models:
+        m.objects.all().delete()
+
+
+
+@pytest.fixture( scope="class" )
+def random_broker_classifications():
+    brokers = {
+        'rbc_test1': {
+            '1.0': {
+                'classifiertest1': [ '1.0' ],
+                'classifiertest2': [ '1.0' ]
+            }
+        },
+        'rbc_test2': {
+            '3.5': {
+                'testing1': [ '42' ],
+                'testing2': [ '23' ]
+            }
+        }
+    }
+
+    minsrc = 10
+    maxsrc = 20
+    mincls = 1
+    maxcls = 20
+
+    msgs = []
+    for brokername, brokerspec in brokers.items():
+        for brokerversion, versionspec in brokerspec.items():
+            for classifiername, clsspec in versionspec.items():
+                for classifierparams in clsspec:
+                    nsrcs = random.randint( minsrc, maxsrc )
+                    for src in range(nsrcs):
+                        ncls = random.randint( mincls, maxcls )
+                        probleft = 1.0
+                        classes = []
+                        probs = []
+                        for cls in range( ncls ):
+                            classes.append( cls )
+                            prob = random.random() * probleft
+                            probleft -= prob
+                            probs.append( prob )
+                        classes.append( ncls )
+                        probs.append( probleft )
+
-    elasticc2.models.DiaObjectTruth.objects.all().delete()
-    elasticc2.models.PPDBAlert.objects.all().delete()
-    elasticc2.models.PPDBDiaForcedSource.objects.all().delete()
-    elasticc2.models.PPDBDiaSource.objects.all().delete()
-    elasticc2.models.PPDBDiaObject.objects.all().delete()
+                        msgs.append( { 'sourceid': src,
+                                       'brokername': brokername,
+                                       'alertid': src,
+                                       'elasticcpublishtimestamp': datetime.datetime.now( tz=pytz.utc ),
+                                       'brokeringesttimestamp': datetime.datetime.now( tz=pytz.utc ),
+                                       'brokerversion': brokerversion,
+                                       'classifiername': classifiername,
+                                       'classifierparams': classifierparams,
+                                       'classid': classes,
+                                       'probability': probs } )
+
+    yield msgs

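For orientation, a hypothetical usage sketch (not part of this commit) of how a test module under /tests might consume the class-scoped snapshot fixture defined above; the test class, test name, and assertion are illustrative assumptions:

import pytest
import elasticc2.models

@pytest.mark.usefixtures( "elasticc2_database_snapshot_class" )
class TestAlertCycleQueries:
    # The fixture restores the pg_dump snapshot once for the whole class and deletes the rows afterwards.
    def test_sources_restored( self ):
        # Assumes the restored snapshot contains at least one DiaSource row.
        assert elasticc2.models.DiaSource.objects.count() > 0

The same pattern applies to elasticc2_ppdb_class for tests that only need the PPDB tables restored.
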
110 changes: 15 additions & 95 deletions tests/docker-compose.yaml
@@ -1,6 +1,6 @@
 services:
   kafka-zookeeper:
-    image: registry.nersc.gov/m1727/raknop/kafka
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/kafka:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../docker_kafka
     healthcheck:
@@ -14,7 +14,7 @@ services:
     depends_on:
       kafka-zookeeper:
         condition: service_healthy
-    image: registry.nersc.gov/m1727/raknop/kafka
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/kafka:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../docker_kafka
     healthcheck:
@@ -24,17 +24,8 @@
       retries: 5
     entrypoint: [ "bin/kafka-server-start.sh", "config/server.properties" ]
 
-  fakebroker:
-    depends_on:
-      kafka-server:
-        condition: service_healthy
-    image: registry.nersc.gov/m1727/raknop/fakebroker
-    build:
-      context: .
-      dockerfile: Dockerfile.fakebroker
-
   postgres:
-    image: registry.nersc.gov/m1727/raknop/tom-postgres
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/tom-postgres:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../docker_postgres
       target: tom-postgres
@@ -45,7 +36,7 @@
       retries: 5
 
   mongodb:
-    image: registry.nersc.gov/m1727/rknop/tom-mongodb:latest
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/tom-mongodb:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../docker_mongodb
     environment:
@@ -73,7 +64,7 @@ services:
         condition: service_healthy
       mongodb:
         condition: service_healthy
-    image: registry.nersc.gov/m1727/raknop/tom_desc_bindmount
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/tom_desc_bindmount:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../
       dockerfile: docker_server/Dockerfile
@@ -120,7 +111,7 @@ services:
     depends_on:
       createdb:
         condition: service_completed_successfully
-    image: registry.nersc.gov/m1727/raknop/tom_desc_bindmount
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/tom_desc_bindmount:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../
       dockerfile: docker_server/Dockerfile
@@ -154,79 +145,6 @@ services:
       MONGODB_ALERT_READER_PASSWORD: reader
 
 
-  brokerpoll:
-    depends_on:
-      createdb:
-        condition: service_completed_successfully
-      tom:
-        condition: service_started
-      fakebroker:
-        condition: service_started
-    image: registry.nersc.gov/m1727/raknop/tom_desc_bindmount
-    build:
-      context: ../
-      dockerfile: docker_server/Dockerfile
-      target: tom-server-bindmount
-    volumes:
-      - type: bind
-        source: ../secrets
-        target: /secrets
-      - type: bind
-        source: ../tom_desc
-        target: /tom_desc
-      - type: volume
-        source: logs
-        target: /logs
-    environment:
-      LOGDIR: /logs
-      DB_NAME: tom_desc
-      DB_HOST: postgres
-      DB_USER: postgres
-      DB_PASS: fragile
-      DB_PORT: 5432
-    entrypoint: [ "python", "manage.py", "brokerpoll2", "--do-test", "--grouptag", "elasticc2" ]
-
-
-  brokerpoll_fastdb_dev:
-    depends_on:
-      createdb:
-        condition: service_completed_successfully
-      tom:
-        condition: service_started
-      fakebroker:
-        condition: service_started
-    image: registry.nersc.gov/m1727/raknop/tom_desc_bindmount
-    build:
-      context: ../
-      dockerfile: docker_server/Dockerfile
-      target: tom-server-bindmount
-    volumes:
-      - type: bind
-        source: ../secrets
-        target: /secrets
-      - type: bind
-        source: ../tom_desc
-        target: /tom_desc
-      - type: volume
-        source: logs
-        target: /logs
-    environment:
-      LOGDIR: /logs
-      MONGOHOST: mongodb
-      MONGODB_ADMIN: mongodb_admin
-      MONGODB_ADMIN_PASSWORD: fragile
-      MONGODB_ALERT_WRITER: mongodb_alert_writer
-      MONGODB_ALERT_WRITER_PASSWORD: writer
-      MONGODB_ALERT_READER: mongdb_alert_reader
-      MONGODB_ALERT_READER_PASSWORD: reader
-      DB_NAME: tom_desc
-      DB_HOST: postgres
-      DB_USER: postgres
-      DB_PASS: fragile
-      DB_PORT: 5432
-    entrypoint: [ "python", "manage.py", "fastdb_dev_brokerpoll", "--do-test", "--grouptag", "fastdb_dev" ]
-
-
   # Thought required: want to make this dependent on
   # createdb completed successfully, or just on the
   # database servers being up? The advantage of the latter
@@ -241,13 +159,15 @@
         condition: service_healthy
       tom:
         condition: service_started
-      fakebroker:
-        condition: service_started
-      brokerpoll:
-        condition: service_started
-      brokerpoll_fastdb_dev:
-        condition: service_started
-    image: registry.nersc.gov/m1727/raknop/tom_server_bindmount_dev
+      kafka-server:
+        condition: service_healthy
+      # fakebroker:
+      #   condition: service_started
+      # brokerpoll:
+      #   condition: service_started
+      # brokerpoll_fastdb_dev:
+      #   condition: service_started
+    image: ${TOM_DOCKER_ARCHIVE:-ghcr.io/lsstdesc/tom_desc}/tom_server_bindmount_dev:${TOM_DOCKER_VERSION:-latest}
     build:
       context: ../
       dockerfile: docker_server/Dockerfile
Binary file added tests/elasticc2_alertcycle_complete.psqlc
