Commit c3e6f016 authored by William Fong's avatar William Fong

updating references to session

parent 42fb6d73
......@@ -14,11 +14,7 @@ def add_aperture(ctx, name, aperture_string):
Add an aperture definition to the database
"""
with db_from_config(ctx.obj["dbconf"]) as db:
check = (
db.session.query(Aperture)
.filter(Aperture.name == name)
.one_or_none()
)
check = db.query(Aperture).filter(Aperture.name == name).one_or_none()
s_r, i_r, o_r = Aperture.from_aperture_string(aperture_string)
......
......@@ -28,9 +28,7 @@ def add_frametype(ctx, frametype_name):
with db_from_config(ctx.obj["dbconf"]) as db:
# Check if we're updating or inserting
check = (
db.session.query(FrameType)
.filter_by(name=frametype_name)
.one_or_none()
db.query(FrameType).filter_by(name=frametype_name).one_or_none()
)
if check:
# Updating
......
......@@ -15,7 +15,7 @@ def add_lightcurvetype(ctx, lightcurve_type_name):
"""
with db_from_config(ctx.obj["dbconf"]) as db:
check = (
db.session.query(LightcurveType)
db.query(LightcurveType)
.filter(LightcurveType == lightcurve_type_name)
.one_or_none()
)
......
......@@ -68,7 +68,7 @@ def ingest(ctx, ephemeris_csv):
eph = SpacecraftEphemeris(
barycentric_dynamical_time=i, **dict(row)
)
db.session.merge(eph)
db.merge(eph)
click.echo("Added {0}".format(eph))
if not ctx.obj["dryrun"]:
db.commit()
......
......@@ -26,7 +26,7 @@ def reflect_psql_admin(engine):
def psql_tables(lcdb):
# PSQL typedefs a bunch of columns which SQLAlchemy complains about
# (but still perfectly utilizes)
return reflect_psql_admin(lcdb.session.get_bind())
return reflect_psql_admin(lcdb.get_bind())
def psql_catalog_tables():
......
......@@ -97,7 +97,7 @@ class CSVPacker(DataPacker):
def serialize_to_database(self, lcdb):
if len(self) > 0:
cursor = lcdb.session.connection().connection.cursor()
cursor = lcdb.connection().connection.cursor()
mass_ingest(
cursor,
......@@ -369,7 +369,7 @@ class LightpointPartitionReader(LightpointPartitionBlob):
)
# Copy lightpoints
connection = db.session.connection().connection
connection = db.connection().connection
work_mem_q = 'SET LOCAL work_mem TO "1GB"'
temp_buffers_q = 'SET LOCAL temp_buffers TO "2GB"'
......@@ -394,7 +394,7 @@ class LightpointPartitionReader(LightpointPartitionBlob):
# Remove duplication
obs_df = obs_df[~obs_df.index.duplicated(keep="last")]
db.session.execute(
db.execute(
Observation.upsert_q(), obs_df.reset_index().to_dict("records")
)
......
......@@ -205,7 +205,7 @@ class BaseBLSIngestor(BufferedDatabaseIngestor):
f"{len(cache)} relevant entries in db"
)
db.session.bulk_insert_mappings(
db.bulk_insert_mappings(
BLS,
filter(
lambda param: tuple(param[key] for key in keys) not in cache,
......
......@@ -85,7 +85,7 @@ def ingest_quat_file(db, filepath):
mask.add(model.date)
camera_quaternions.append(model)
db.session.add_all(camera_quaternions)
db.add_all(camera_quaternions)
db.flush()
......
......@@ -231,7 +231,7 @@ class BaseEM2ArrayIngestor(BufferedDatabaseIngestor):
self.log(f"Flushing {len(lightcurves):,} lightcurves")
start = datetime.now()
mgr = CopyManager(
db.session.connection().connection,
db.connection().connection,
models.ArrayOrbitLightcurve.__tablename__,
INGESTION_COLS,
)
......@@ -259,7 +259,7 @@ class BaseEM2ArrayIngestor(BufferedDatabaseIngestor):
.values(best_lcs)
.on_conflict_do_nothing()
)
db.session.execute(q)
db.execute(q)
end = datetime.now()
metric = models.QLPOperation(
......@@ -283,7 +283,7 @@ class BaseEM2ArrayIngestor(BufferedDatabaseIngestor):
runtime_parameters=self.determine_process_parameters(),
)
db.add(process)
db.session.flush()
db.flush()
self.log(
f"Updating runtime parameters to {process.runtime_parameters}"
)
......
......@@ -157,7 +157,7 @@ class DBLoader(Process):
}
)
t0 = datetime.utcnow()
self.db.session.execute(q, self.insert_buffer)
self.db.execute(q, self.insert_buffer)
t1 = datetime.utcnow()
self.insert_history.new_timing(len(self.insert_buffer), t0, t1)
new_insert_buffer = self.insert_history.get_new_buf_size(
......@@ -193,7 +193,7 @@ class DBLoader(Process):
)
)
t0 = datetime.utcnow()
self.db.session.execute(q, self.update_buffer)
self.db.execute(q, self.update_buffer)
t1 = datetime.utcnow()
self.update_history.new_timing(len(self.update_buffer), t0, t1)
new_update_buffer = self.update_history.get_new_buf_size(
......@@ -223,7 +223,7 @@ class DBLoader(Process):
# observation row, catch and retry when encountering Deadlock errors.
while True:
try:
self.db.session.execute(
self.db.execute(
Observation.upsert_dicts(), df.to_dict("records")
)
self.observation_buffer = []
......
......@@ -102,7 +102,7 @@ def Partitionable(partition_type, *columns):
parent, PGInherits.parent_oid == parent.oid
).filter(parent.relname == cls.__tablename__)
df = pd_read_sql(info_q.statement, db.session.bind)
df = pd_read_sql(info_q.statement, db.bind)
result = df["expression"].str.extract(partition_range_extr)
result[["begin_range", "end_range"]] = result[
......
......@@ -115,7 +115,7 @@ def load_ephemeris(db, ephemeris):
mask.add(row["JDTDB"])
objects.append(obj)
db.session.add_all(objects)
db.add_all(objects)
db.commit()
......
......@@ -495,8 +495,8 @@ def simulate_lightcurve_ingestion_environment(
]
db.add(background_type)
db.add(background_aperture)
db.session.add_all(magnitude_types)
db.session.add_all(photometric_apertures)
db.add_all(magnitude_types)
db.add_all(photometric_apertures)
note(f"PUSHING {photometric_apertures}")
note(f"{db.query(models.Aperture).all()}")
......
......@@ -139,7 +139,7 @@ def test_tjd_cache(db, raw_ffi_type, orbit, frames):
for frame in frames:
frame.frame_type = raw_ffi_type
frame.orbit = orbit
db.session.add_all(frames)
db.add_all(frames)
db.flush()
state_q = (
db.query(Frame.cadence, Frame.camera, Frame.tjd)
......
def import_lc_prereqs(db, lightcurves):
for lc in lightcurves:
db.session.merge(lc.aperture)
db.session.merge(lc.lightcurve_type)
db.merge(lc.aperture)
db.merge(lc.lightcurve_type)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment