Source code for gavo.user.upgrade

"""
Stuff dealing with the upgrade of the database schema.

From software version 0.8.2 on, there is a dc.metastore table with a key
schemaversion.  Each change in the central schema increases the value
(interpreted as an integer) by one, and this module will contain a
corresponding upgrader.

An upgrader inherits from the Upgrader class.  See there for more details.

This module contains the current schemaversion expected by the software; gavo
upgrade does everything required to bring what's in the database in sync
with the code (or so I hope).
"""

#c Copyright 2008-2023, the GAVO project <gavo@ari.uni-heidelberg.de>
#c
#c This program is free software, covered by the GNU GPL.  See the
#c COPYING file in the source distribution.


import os
import re
import sys

from psycopg2 import extensions

from gavo import base
from gavo import registry
from gavo import rsc
from gavo import rscdesc  #noflake: for cache registration
from gavo import utils


CURRENT_SCHEMAVERSION = 34
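
# The schema version itself lives in the dc.metastore key/value table under
# the key "schemaversion"; getDBSchemaVersion below reads it through
# base.getDBMeta, and each Upgrader bumps it by one via
# Upgrader.updateSchemaversion.  Conceptually (a sketch, assuming the
# default metastore layout with key and value columns), this corresponds to:
#
#   SELECT value FROM dc.metastore WHERE key='schemaversion';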


class _COMMIT(object):
	"""A sentinel used by iterStatements.
	"""


def getColumnNamesFor(tableId, connection):
	"""returns (normalised) column names for tableId.

	(tableId is something like //tap#tables)
	"""
	t = rsc.TableForDef(base.resolveCrossId(tableId), connection=connection)
	return set(n for n, t in t.getColumnsFromDB(t.tableDef.getQName()))


def relationExists(tableName, connection):
	"""returns True if tableName (as schema.name) exists, False otherwise.
	"""
	q = base.UnmanagedQuerier(connection)
	return q.getTableType(tableName) is not None


def _updateTAP_SCHEMA(connection):
	"""re-ingests TAP_SCHEMA metadata for all RDs already mentioned in
	TAP_SCHEMA.
	"""
	from gavo.protocols import tap

	toDo = [r[0] for r in connection.query(
		"SELECT DISTINCT sourceRD FROM tap_schema.tables")]

	for rdId in toDo:
		if rdId=="__system__/tap":
			# TAP_SCHEMA needs special handling because of bootstrapping
			tap._insertRDIntoTAP_SCHEMA(base.caches.getRD(rdId), connection)
			continue

		try:
			tap.publishToTAP(
				base.caches.getRD(rdId, doQueries=False),
				connection)
		except Exception as msg:
			base.ui.notifyWarning("RD %s couldn't be loaded or ingested to"
				" TAP_SCHEMA (%s). Fix and run dachs imp -m on it to have"
				" up-to-date metadata in TAP_SCHEMA"%(rdId, msg))
			continue


def iterEPNTAPTables(connection):
	"""yields table descriptors for all tables that claim to conform
	to EPN-TAP.
	"""
	mth = base.caches.getMTH(None)
	for tableName, in connection.query("select table_name"
			" from tap_schema.tables"
			" where utype in ("
			" 'ivo://vopdc.obspm/std/epncore#schema-2.0',"
			" 'ivo://ivoa.net/std/epntap#table-2.0')"):
		try:
			yield mth.getTableDefForTable(tableName)
		except:
			base.ui.notifyWarning(f"Published EPN-TAP table {tableName}"
				" has no definition any more")


class AnnotatedString(str):
	"""a string with an annotation.

	This is (optionally) used to hold SQL statements here; the annotation
	is shown to the user instead of the naked statement when present.
	"""
	def __new__(cls, content, annotation):
		res = str.__new__(cls, content)
		res.annotation = annotation
		return res


def showProgress(msg):
	"""outputs msg to stdout without lf "immediately".
	"""
	sys.stdout.write(msg)
	sys.stdout.flush()


def getDBSchemaVersion():
	"""returns the schemaversion given in the database.

	This will return -1 if no schemaversion is declared.
	"""
	try:
		return int(base.getDBMeta("schemaversion"))
	except (KeyError, base.DBError):
		return -1


def dumpExtensionUpdater(conn):
	"""prints SQL to bring [db]managedExtensions up to date.
	"""
	statements = []
	for extName in base.getConfig("db", "managedextensions"):
		res = list(conn.query(
			"SELECT default_version, installed_version"
			" FROM pg_available_extensions"
			" WHERE name=%(extName)s", locals()))
		if res:
			def_version, ins_version = res[0]
			if ins_version is None:
				# extra treatment for some magic extensions that may have been
				# present from before postgres' proper extension mechanism.
				if extName in ['q3c', 'pg_sphere']:
					statements.append("CREATE EXTENSION %s FROM unpackaged;"%extName)
				else:
					statements.append("CREATE EXTENSION %s;"%extName)
			elif ins_version!=def_version:
				statements.append("ALTER EXTENSION %s UPDATE TO %s;"%(
					extName,
					extensions.adapt(def_version).getquoted().decode('utf-8')))
		# else fall through (either the extension is not installed
		# or it's up to date)

	if statements:
		print("\n".join(statements)+"\n")
		return 0
	else:
		return 1

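# For illustration only: on a site where q3c predates postgres' extension
# mechanism and pg_sphere is merely outdated, dumpExtensionUpdater might
# print something like the following (the version number is made up):
#
#   CREATE EXTENSION q3c FROM unpackaged;
#   ALTER EXTENSION pg_sphere UPDATE TO '1.2.0';
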
class Upgrader(object):
	"""A specification to upgrade from some schema version to another
	schema version.

	Upgraders live as uninstantiated classes.  Their version attribute gives
	the version their instructions update *from*; their destination version
	therefore is version+1.

	An upgrader can have attributes s_<seqno>_<something>, which are
	classmethods(connection) -> None performing schema changes.  They always
	need to check whether the manipulation has already been performed.  Use
	relationExists and/or getColumnNamesFor as appropriate.

	An upgrader can have attributes named u_<seqno>_<something>.  These can be

	* strings, which are then directly executed in the database (but you
	  should only do that for idempotent operations)
	* classmethods, which will be executed in sequence (version, seqno) after
	  *all* schemachange methods are executed.  The methods will be called
	  with a connection argument.  You must not commit this connection.
	  You must not swallow exceptions that have left the connection unready
	  (i.e., require a rollback).

	Note that if you run rsc.makeData, you MUST pass both
	connection=connection and runCommit=False in order to avoid messing
	people's lives up.

	The updaters should have 1-line docstrings explaining what they do.

	The update of the schemaversion is done automatically, you don't need
	to worry about it.

	Note that whenever you change the schema in a system RD, the corresponding
	upgrader needs to be a schemachange; otherwise, it's very likely that any
	use of the system RD will fail, which usually makes an installation not
	upgradeable without manual intervention.
	"""
	version = None
	@classmethod
	def updateSchemaversion(cls, connection):
		# no docstring, we output our info ourselves
		showProgress("> update schemaversion to %s..."%(cls.version+1))
		base.setDBMeta(connection, "schemaversion", cls.version+1)

	@classmethod
	def iterNormalActions(cls):
		"""returns strings and classmethods that run non-schemachanging
		actions.
		"""
		for cmdAttr in (s for s in sorted(dir(cls)) if s.startswith("u_")):
			yield getattr(cls, cmdAttr)
		yield cls.updateSchemaversion

	@classmethod
	def iterSchemaChanges(cls):
		"""returns classmethods changing the schema as necessary.

		These always run before the normal actions.
		"""
		for cmdAttr in (s for s in sorted(dir(cls)) if s.startswith("s_")):
			yield getattr(cls, cmdAttr)

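# For orientation, a minimal sketch of what an upgrader for a hypothetical
# schema version NN might look like.  The RD, table, and column names below
# are invented for illustration and do not exist in DaCHS:
#
# class ToNNUpgrader(Upgrader):
# 	version = NN-1
#
# 	@classmethod
# 	def s_010_add_example_column(cls, connection):
# 		"""add the example column to dc.exampletable"""
# 		if "example" not in getColumnNamesFor("//example#exampletable",
# 				connection):
# 			connection.execute("ALTER TABLE dc.exampletable"
# 				" ADD COLUMN example TEXT")
#
# 	@classmethod
# 	def u_020_refill_example(cls, connection):
# 		"""re-fill the example table"""
# 		rsc.makeData(base.caches.getRD("//example").getById("import"),
# 			connection=connection, runCommit=False)
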
class To0Upgrader(Upgrader):
	"""This is executed when there's no schema version defined in the database.

	The assumption is that the database reflects the status of 0.8, so it adds
	the author column in dc.services if necessary (which it's not if the
	software has been updated to 0.8.1).
	"""
	version = -1
	@classmethod
	def s_000_addauthor(cls, connection):
		"""add an author column to dc.services if necessary"""
		if "authors" in list(connection.queryToDicts(
				"SELECT * FROM dc.resources LIMIT 1"))[0]:
			return

		connection.query("alter table dc.resources add column authors text")
		for sourceRD, resId in connection.query("select sourcerd, resid"
				" from dc.resources"):
			try:
				res = base.getRD(sourceRD).getById(resId)
				authors = "; ".join(m.getContent("text") #noflake: used through locals
					for m in res.iterMeta("creator.name", propagate=True))
			except:
				# don't worry if fetching authors fails; people will notice...
				pass
			else:
				connection.query("update dc.resources set authors=%(authors)s"
					" where resid=%(resId)s and sourcerd=%(sourceRD)s", locals())
	@classmethod
	def u_010_makeMetastore(cls, connection):
		"""create the meta store"""
		td = base.caches.getRD("//dc_tables").getById("metastore")
		rsc.TableForDef(td, create=True, connection=connection)

class To1Upgrader(Upgrader):
	version = 0

	@classmethod
	def u_000_update_funcs(cls, connection):
		"""update GAVO server-side functions"""
		rsc.makeData(base.caches.getRD("//adql").getById("make_udfs"),
			connection=connection, runCommit=False)

class To2Upgrader(Upgrader):
	version = 1

	@classmethod
	def _upgradeTable(cls, td, colName, connection):
		col = td.getColumnByName(colName)
		if not col.type=='double precision' or not col.xtype=='mjd':
			# this is not done via the mixin, it appears; give up
			return

		showProgress(td.getQName()+", ")
		connection.execute("ALTER TABLE %s ALTER COLUMN %s"
			" SET DATA TYPE DOUBLE PRECISION USING ts_to_mjd(%s)"%(
				td.getQName(), colName, colName))
		rsc.TableForDef(td, connection=connection, create=False
			).updateMeta()

	@classmethod
	def u_000_siapDateObsToMJD(cls, connection):
		"""change SIAP and SSAP dateObs columns to MJD"""
		dd = base.caches.getRD("//obscore", doQueries=False
			).getById("refreshAfterSchemaUpdate")
		rsc.makeData(dd, connection=connection)

class To3Upgrader(Upgrader):
	version = 2

	@classmethod
	def u_000_tapSchema(cls, connection):
		"""add supportedmodels table to tap_schema"""
		rsc.makeData(base.caches.getRD("//tap").getById("createSchema"),
			connection=connection, runCommit=False)

	@classmethod
	def u_010_declareObscoreModel(cls, connection):
		"""declare obscore data model if the obscore table is present"""
		if list(connection.query(
				"select * from dc.tablemeta where tablename='ivoa.ObsCore'")):
			from gavo.protocols import tap
			rd = base.caches.getRD("//obscore")
			tap.publishToTAP(rd, connection)
		else:
			showProgress(" (not present)")

class To4Upgrader(Upgrader):
	version = 3

	@classmethod
	def u_000_adqlfunctions(cls, connection):
		"""update ADQL GAVO-defined functions for the postgres planner's benefit"""
		rsc.makeData(base.caches.getRD("//adql").getById("make_udfs"),
			connection=connection, runCommit=False)

class To5Upgrader(Upgrader):
	version = 4

	@classmethod
	def u_000_updateObscore(cls, connection):
		"""update obscore to work even when the table is empty"""
		rsc.TableForDef(base.caches.getRD("//obscore").getById("emptyobscore"),
			connection=connection, create=True)
		dd = base.caches.getRD("//obscore", doQueries=False
			).getById("refreshAfterSchemaUpdate")
		rsc.makeData(dd, connection=connection)

class To6Upgrader(Upgrader):
	version = 5

	@classmethod
	def s_010_addProductColumns(cls, connection):
		"""add preview and datalink columns to dc.products"""
		cols = getColumnNamesFor("//products#products", connection)

		if "preview" not in cols:
			connection.execute("ALTER TABLE dc.products ADD COLUMN"
				" preview TEXT DEFAULT 'AUTO'")
			connection.execute("ALTER TABLE dc.products ALTER COLUMN"
				" preview DROP DEFAULT")

		if "datalink" not in cols:
			connection.execute("ALTER TABLE dc.products ADD COLUMN"
				" datalink TEXT")

	@classmethod
	def u_020_remetaObscore(cls, connection):
		"""update obscore metadata to fix the erroneous id"""
		rsc.makeData(base.caches.getRD("//obscore").getById("create"),
			connection=connection, runCommit=False,
			parseOptions=rsc.getParseOptions(metaOnly=True))

class To7Upgrader(Upgrader):
	version = 6

	@classmethod
	def s_010_addPreviewMIMEColumn(cls, connection):
		"""add preview_mime column to dc.products"""
		if not "preview_mime" in getColumnNamesFor(
				"//products#products", connection):
			connection.execute("ALTER TABLE dc.products ADD COLUMN"
				" preview_mime TEXT")

class To8Upgrader(Upgrader):
	version = 7

	@classmethod
	def s_010_removeColumnsMeta(cls, connection):
		if relationExists("dc.columnmeta", connection):
			connection.execute("DROP TABLE dc.columnmeta")

class To9Upgrader(Upgrader):
	version = 8

	u_010_chuckADQLPrefix = AnnotatedString("UPDATE TAP_SCHEMA.columns"
		" SET datatype=substring(datatype from 6)"
		" WHERE datatype LIKE 'adql:%%'",
		"Remove adql: prefix in TAP_SCHEMA.columns.datatype")

	u_020_setSize1OnAtoms = AnnotatedString("UPDATE tap_schema.columns"
		" SET \"size\"=1 WHERE NOT datatype LIKE '%%(*)'",
		"Set size=1 in TAP_SCHEMA.columns for atomic types")

	u_030_removeArrayMarkInText = AnnotatedString("UPDATE tap_schema.columns"
		" SET datatype=replace(datatype, '(*)', '') WHERE datatype LIKE '%%(*)'",
		"Turn VARCHAR(*) into simple VARCHAR (size=NULL already set for those)")

class To10Upgrader(Upgrader):
	version = 9

	@classmethod
	def u_000_dropADQLExamples(cls, connection):
		"""drop old TAP examples tables (gone to _examples meta)"""
		if relationExists("tap_schema.examples", connection):
			from gavo.user import dropping
			dropping._do_dropTable("tap_schema.examples", connection)

	@classmethod
	def u_010_createDLAsyncTable(cls, connection):
		"""import job table for async datalink"""
		if not relationExists("dc.datalinkjobs", connection):
			from gavo import rsc
			rsc.makeData(base.caches.getRD("//datalink").getById("import"),
				connection=connection, runCommit=False)

class To11Upgrader(Upgrader):
	version = 10
	@classmethod
	def u_000_findMixedinTables(cls, connection):
		"""inform about tables with non-trivial mixins"""
		# in reality, the mixins that really give us a headache here
		# are the ones mixing in products.  Hence, we simply look
		# for tables that have both accref and embargo; that's
		# probably a certain indication.
		print("\n!! Important: column sequences"
			" of tables with some mixins have changed.")
		print("!! If this affects you, below commands are shown that will re-import")
		print("!! the affected tables. Some services on top of these tables may")
		print("!! be *broken* until these commands have run.")
		print("!! Sorry for this inconvenience; we hope it won't happen again.\n")

		for rdId in registry.findAllRDs():
			if rdId.startswith("__system"):
				continue

			try:
				rd = base.caches.getRD(rdId)
			except:
				# ignore broken RDs -- services there are broken anyway
				continue

			ids = set()
			for td in rd.tables:
				try:
					td.getColumnByName("accref") and td.getColumnByName("embargo")
				except base.NotFoundError:
					continue  # table not affected
				else:
					if not rsc.TableForDef(td, connection=connection, create=False
							).exists():
						continue
					# table needs re-importing, see if you can find a corresponding
					# data element
					for dd in rd.dds:
						for make in dd.makes:
							if make.table==td:
								ids.add(dd.id)

			if ids:
				print("dachs imp '%s' %s"%(rd.sourceId,
					" ".join("'%s'"%id for id in ids)))

		sys.stderr.write("\nEnd of scan of mixin-affected tables...")

class To12Upgrader(Upgrader):
	version = 11

	@classmethod
	def u_010_updateTAPRecord(cls, connection):
		"""prettify the TAP record's IVOID"""
		from gavo.registry import publication
		publication.updateServiceList([base.caches.getRD("//services")],
			connection=connection)
		publication.makeDeletedRecord(
			"ivo://"+base.getConfig("ivoa", "authority")+"/__system__/tap/run",
			connection)

class To13Upgrader(Upgrader):
	version = 12

	@classmethod
	def u_010_updateObscore(cls, connection):
		"""upgrade ivoa.obscore to obscore 1.1.
		"""
		if relationExists("ivoa._obscoresources", connection):
			dd = base.caches.getRD("//obscore", doQueries=False
				).getById("refreshAfterSchemaUpdate")
			rsc.makeData(dd, connection=connection)

class To14Upgrader(Upgrader):
	version = 13
	@classmethod
	def s_010_addColIndex(cls, connection):
		"""Adding column_index column to TAP_SCHEMA.columns"""
		dbCols = getColumnNamesFor("//tap#columns", connection)

		if not "column_index" in dbCols:
			connection.execute("ALTER TABLE TAP_SCHEMA.columns"
				" ADD COLUMN column_index SMALLINT")

		if not "arraysize" in dbCols:
			connection.execute("ALTER TABLE TAP_SCHEMA.columns"
				" ADD COLUMN arraysize TEXT")

		if not "xtype" in dbCols:
			connection.execute("ALTER TABLE TAP_SCHEMA.columns"
				" ADD COLUMN xtype TEXT")

class To15Upgrader(Upgrader):
	version = 14

	@classmethod
	def s_10_add_uws_creationTime(cls, connection):
		"""adding creationTime to UWS tables for UWS 1.1 support"""
		q = base.UnmanagedQuerier(connection)
		for tableId, tableName in [
				("//datalink#datalinkjobs", "dc.datalinkjobs"),
				("//uws#userjobs", "uws.userjobs"),
				("//tap#tapjobs", "tap_schema.tapjobs")]:
			if q.getTableType(tableName) is not None:
				if not "creationtime" in getColumnNamesFor(tableId, connection):
					connection.execute("ALTER TABLE %s"
						" ADD COLUMN creationTime TIMESTAMP"%tableName)

class To16Upgrader(Upgrader):
	version = 15

	u_010_delete_obscore_1_0_model = AnnotatedString(
		# this purges the old ivoid unconditionally; that's never wrong,
		# because obscore 1.1 has one of its own.  But it's also
		# fixing spurious publications where old versions of DaCHS
		# have blindly declared obscore support
		"DELETE FROM tap_schema.supportedmodels"
		" WHERE dmivorn='ivo://ivoa.net/std/ObsCore/v1.0'",
		"Deleting obscore 1.0 model support declaration")

	@classmethod
	def s_10_upgrade_tap_schema_tables(cls, connection):
		"""Adding 1.1 columns to TAP_SCHEMA.tables.
		"""
		if not "table_index" in getColumnNamesFor("//tap#tables", connection):
			connection.execute("ALTER TABLE TAP_SCHEMA.tables"
				" ADD COLUMN table_index SMALLINT")

	@classmethod
	def s_20_upgrade_tap_schema_columns(cls, connection):
		"""Adding 1.1 columns to TAP_SCHEMA.columns.
		"""
		dbCols = getColumnNamesFor("//tap#columns", connection)

		if not "arraysize" in dbCols:
			connection.execute("ALTER TABLE TAP_SCHEMA.columns"
				" ADD COLUMN arraysize TEXT")

		if not "xtype" in dbCols:
			connection.execute("ALTER TABLE TAP_SCHEMA.columns"
				" ADD COLUMN xtype TEXT")

	@classmethod
	def u_50_update_tap_schema(cls, connection):
		"""Filling new TAP_SCHEMA columns"""
		_updateTAP_SCHEMA(connection)

	@classmethod
	def u_60_update_obscore_spectra(cls, connection):
		"""Updating obscore-published spectral tables"""
		try:
			from gavo.rscdef import scripting
			script = base.resolveCrossId("//obscore#addTableToObscoreSources")
			script.notify = False

			with connection.savepoint():
				for row in connection.query(
						"SELECT DISTINCT tablename FROM ivoa._obscoresources"
						" WHERE sqlfragment LIKE '%%''spectrum'' AS text) AS dataprod%%'"):
					table = rsc.TableForDef(
						base.getTableDefForTable(connection, row[0]),
						connection=connection)
					showProgress(" "+row[0])
					scripting.PythonScriptRunner(script).run(table)

			dd = base.caches.getRD("//obscore", doQueries=False
				).getById("create")
			rsc.makeData(dd, connection=connection, runCommit=False)
		except Exception:
			# probably there is no obscore table on this installation.
			showProgress("(skipped)")

class To17Upgrader(Upgrader):
	version = 16

	@classmethod
	def u_10_import_adql(cls, connection):
		"""loading new ADQL UDFs"""
		dd = base.caches.getRD("//adql", doQueries=False
			).getById("make_udfs")
		rsc.makeData(dd, connection=connection, runCommit=False)

	@classmethod
	def u_20_reload_tap_schema(cls, connection):
		"""updating tap_schema.schemas conflict rule"""
		dd = base.caches.getRD("//tap", doQueries=False
			).getById("createSchema")
		rsc.makeData(dd, connection=connection, runCommit=False,
			parseOptions=rsc.getParseOptions(metaOnly=False,
				systemImport=False))

	@classmethod
	def u_30_update_obscore_meta(cls, connection):
		"""updating obscore metadata"""
		if relationExists("ivoa._obscoresources", connection):
			dd = base.caches.getRD("//obscore", doQueries=False
				).getById("create")
			rsc.makeData(dd, connection=connection, runCommit=False,
				parseOptions=rsc.getParseOptions(metaOnly=False,
					systemImport=True))

class To18Upgrader(Upgrader):
	version = 17

	@classmethod
	def u_10_restoreTAP_SCHEMA(cls, connection):
		"""recreating a possibly damaged TAP_SCHEMA"""
		_updateTAP_SCHEMA(connection)

class To19Upgrader(Upgrader):
	version = 18

	@classmethod
	def s_10_add_schema_index(cls, connection):
		"""adding schema_index to tap_schema.schemas"""
		if not "schema_index" in getColumnNamesFor("//tap#schemas", connection):
			connection.execute("ALTER TABLE tap_schema.schemas ADD COLUMN"
				" schema_index INTEGER")

	@classmethod
	def u_20_add_tap_schema_keys(cls, connection):
		"""re-importing TAP_SCHEMA to update foreign key declarations"""
		_updateTAP_SCHEMA(connection)

class To20Upgrader(Upgrader):
	version = 19

	@classmethod
	def u_10_update_obscore_meta(cls, connection):
		"""updating obscore metadata"""
		if (relationExists("ivoa._obscoresources", connection)
				and relationExists("ivoa.emptyobscore", connection)):
			dd = base.caches.getRD("//obscore", doQueries=False
				).getById("refreshAfterSchemaUpdate")
			rsc.makeData(dd, connection=connection)

	@classmethod
	def u_20_import_adql(cls, connection):
		"""updating ADQL UDFs"""
		dd = base.caches.getRD("//adql", doQueries=False
			).getById("make_udfs")
		rsc.makeData(dd, connection=connection, runCommit=False)

class To21Upgrader(Upgrader):
	version = 20

	@classmethod
	def s_10_update_obscoresources(cls, connection):
		"""adding sourcerd column to obscore sources table"""
		if (relationExists("ivoa._obscoresources", connection)
				and not "sourcerd" in getColumnNamesFor(
					"//obscore#_obscoresources", connection)):
			connection.execute("ALTER TABLE ivoa._obscoresources"
				" ADD COLUMN sourcerd TEXT")

	@classmethod
	def u_20_fill_obscoresources_sourcerd(cls, connection):
		"""filling sourcerd column in obscore sources"""
		if relationExists("ivoa._obscoresources", connection):
			for row in connection.query("SELECT tableName FROM"
					" ivoa._obscoresources"):
				tableName = row[0]
				matches = list(connection.query("SELECT sourcerd FROM dc.tablemeta"
					" WHERE tablename=%(tableName)s", locals()))
				if not matches:
					# table dropped, drop it from obscoresources
					connection.execute("DELETE FROM ivoa._obscoresources"
						" WHERE tablename=%(tableName)s", locals())
				else:
					sourceRD = matches[0][0]
					connection.execute("UPDATE ivoa._obscoresources"
						" SET sourcerd=%(sourceRD)s"
						" WHERE tablename=%(tableName)s", locals())

class To22Upgrader(Upgrader):
	version = 21

	@classmethod
	def s_10_add_nrows_to_tablemeta(cls, connection):
		"""adding nrows to dc.tablemeta"""
		if "nrows" not in getColumnNamesFor("//dc_tables#tablemeta", connection):
			connection.execute("ALTER TABLE dc.tablemeta ADD COLUMN"
				" nrows BIGINT")

class To23Upgrader(Upgrader):
	version = 22

	@classmethod
	def u_10_add_and_fill_rds(cls, connection):
		"""creating and filling dc.rdmeta"""
		rds = []
		for rdId in registry.findAllRDs():
			try:
				rds.append(("bootstrap", base.caches.getRD(rdId)))
			except Exception:
				# let's just ignore bad RDs; if people haven't noticed yet
				# this is not the time to tell them.
				pass

		rsc.makeData(base.resolveCrossId("//rds#update-for-rd"),
			forceSource=rsc.MultiForcedSources(rds),
			connection=connection)

class To24Upgrader(Upgrader):
	version = 23

	@classmethod
	def u_000_update_funcs(cls, connection):
		"""update GAVO server-side functions"""
		rsc.makeData(base.caches.getRD("//adql").getById("make_udfs"),
			connection=connection, runCommit=False)

class To25Upgrader(Upgrader):
	version = 24

	@classmethod
	def u_000_hash_passwords(cls, connection):
		"""scrypt-hashing existing passwords"""
		from gavo.protocols import creds

		updates = []
		for username, password in connection.query(
				"SELECT username, password FROM dc.users"):
			if not re.match("[a-z]+:", password):
				updates.append((username, creds.hashPassword(password)))

		for username, hash in updates:
			connection.execute("UPDATE dc.users SET password=%(hash)s"
				" WHERE username=%(username)s", locals())

class To26Upgrader(Upgrader):
	version = 25

	@classmethod
	def u_000_give_extra_float_digits(cls, connection):
		"""ensuring postgres doesn't lose bits in floats"""
		with base.getWritableAdminConn() as suconn:
			for profileName in ["untrustedquery", "trustedquery", "admin"]:
				profile = base.getDBProfile(profileName)
				suconn.execute("alter role %s set extra_float_digits=3"%profile.user)

class To27Upgrader(Upgrader):
	version = 26

	@classmethod
	def s_010_create_col_stats_table(cls, connection):
		"""Creating the dc.simple_col_stats table"""
		if not relationExists("dc.simple_col_stats", connection):
			# this will first make tableName alone a primary key on dc.tablemeta
			tablemeta = rsc.TableForDef(
				base.resolveCrossId("//dc_tables#tablemeta"),
				connection=connection)
			tablemeta.updateMeta()
			tablemeta._dropPrimaryKey()
			tablemeta.makeIndices()

			rsc.makeData(base.resolveCrossId("//dc_tables#import"),
				connection=connection, runCommit=False)
	@classmethod
	def s_020_add_coverage_to_tablemeta(cls, connection):
		"""Adding coverage columns to dc.rdmeta"""
		cols = getColumnNamesFor("//rds#rdmeta", connection)
		if "spatial" not in cols:
			connection.execute(
				"ALTER TABLE dc.rdmeta ADD COLUMN spatial TEXT")
			connection.execute(
				"ALTER TABLE dc.rdmeta ADD COLUMN temporal double precision[]")
			connection.execute(
				"ALTER TABLE dc.rdmeta ADD COLUMN spectral double precision[]")
	@classmethod
	def u_30_update_obscore_meta(cls, connection):
		"""Re-import obscore metadata to update for UCD erratum"""
		if list(connection.query(
				"select * from dc.tablemeta where tablename ILIKE 'ivoa.ObsCore'")):
			from gavo.protocols import tap
			rd = base.caches.getRD("//obscore")
			tap.publishToTAP(rd, connection)
		else:
			showProgress(" (not present)")

class To28Upgrader(Upgrader):
	version = 27

	@classmethod
	def u_10_update_TAP_SCHEMA_META(cls, connection):
		"""Declaring TAP_SCHEMA descriptions as unicode"""
		from gavo.protocols import tap
		tap._insertRDIntoTAP_SCHEMA(rscdesc.getRD("//tap"), connection)

class To29Upgrade(Upgrader):
	version = 28

	@classmethod
	def u_10_update_EPNTAP_meta(cls, connection):
		"""Updating EPNTAP2 metadata where necessary and possible"""
		rds = set()
		for td in iterEPNTAPTables(connection):
			try:
				rds.add(td.rd.sourceId)
				table = rsc.DBTable(td, connection=connection)
				table.updateMeta()
			except Exception as msg:
				base.ui.notifyWarning("Could not update EPN-TAP table"
					f" {td.getQName()}: {msg}. Please fix manually.")

		from gavo.protocols import tap
		for rdId in rds:
			try:
				tap.publishToTAP(
					base.caches.getRD(rdId), connection)
			except Exception as msg:
				base.ui.notifyWarning("Could not update TAP metadata for"
					f" {rdId}: {msg}. Please fix manually.")

class To30Upgrader(Upgrader):
	version = 29

	@classmethod
	def u_10_make_stats_index(cls, connection):
		"""Updating indices on dc.simple_col_stats"""
		table = rsc.TableForDef(
			base.resolveCrossId("//dc_tables#simple_col_stats"),
			connection=connection)
		table.dropIndices()
		table.makeIndices()

	@classmethod
	def u_20_create_discrete_text_values(cls, connection):
		"""Creating dc.discrete_string_values"""
		rsc.TableForDef(
			base.resolveCrossId("//dc_tables#discrete_string_values"),
			connection=connection, create=True)

class To31Upgrader(Upgrader):
	version = 30
	@classmethod
	def u_10_update_ssa_fov_ucd(cls, connection):
		"""Updating SSA published metadata for SSA 1.1 erratum 2 (instr.fov)"""
		# Well, SSA metadata will actually fix itself (we might consider
		# re-publishing the services so their tableset gets updated, but
		# I'd say that's overkill).  Instead, let's just fix whatever
		# has a problem in the TAP_SCHEMA.
		from gavo.protocols import tap
		for rdId, in connection.query(
				"SELECT DISTINCT sourcerd FROM tap_schema.columns"
				" WHERE ucd='instr.fov'"):
			tap.publishToTAP(base.caches.getRD(rdId), connection)

class To32Upgrader(Upgrader):
	version = 31

	@classmethod
	def u_10_update_obscore_defs(cls, connection):
		"""updating obscore definitions to use reals for spectral"""
		if relationExists("ivoa._obscoresources", connection):
			dd = base.caches.getRD("//obscore", doQueries=False
				).getById("refreshAfterSchemaUpdate")
			rsc.makeData(dd, connection=connection)

class To33Upgrader(Upgrader):
	version = 32

	@classmethod
	def s_010_add_nrows_in_tap_schema(cls, connection):
		"""adding nrows to tap_schema.tables"""
		if "nrows" not in getColumnNamesFor("//tap#tables", connection):
			connection.execute("ALTER TABLE tap_schema.tables"
				" ADD COLUMN nrows BIGINT")

	@classmethod
	def u_020_fill_tap_schema(cls, connection):
		"""updating tap_schema to include nrows"""
		_updateTAP_SCHEMA(connection)

class To34Upgrader(Upgrader):
	version = 33

	@classmethod
	def u_000_update_funcs(cls, connection):
		"""update GAVO server-side functions"""
		rsc.makeData(base.caches.getRD("//adql").getById("make_udfs"),
			connection=connection, runCommit=False)

def iterStatements(startVersion,
		endVersion=CURRENT_SCHEMAVERSION, upgraders=None):
	"""yields all upgraders from startVersion to endVersion in sequence.
	"""
	toRun = []
	for upgrader in utils.iterDerivedClasses(Upgrader,
			upgraders or list(globals().values())):
		if startVersion<=upgrader.version<endVersion:
			toRun.append(upgrader)
	toRun.sort(key=lambda upgrader:upgrader.version)

	schemaUpgraded = False
	for upgrader in toRun:
		for statement in upgrader.iterSchemaChanges():
			yield statement
			schemaUpgraded = True

	# schema changes must be written idempotently, so we can
	# commit here without damaging later runs.
	if schemaUpgraded:
		yield _COMMIT

	for upgrader in toRun:
		for statement in upgrader.iterNormalActions():
			yield statement
		yield _COMMIT

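# A sketch of how this generator is consumed (illustrative; the actual loop,
# including progress output, is in upgrade() below):
#
# for statement in iterStatements(getDBSchemaVersion()):
# 	if statement is _COMMIT:
# 		conn.commit()
# 	elif callable(statement):
# 		statement(conn)  # an upgrader classmethod
# 	else:
# 		conn.execute(statement)  # a plain or Annotated SQL string
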
def upgrade(forceDBVersion=None):
	"""runs all updates necessary to bring a database to the
	CURRENT_SCHEMAVERSION.

	Unless catastrophic things go on, each upgrade is a transaction of its
	own; the first failed transaction stops the upgrade at the version last
	successfully upgraded to.
	"""
	if forceDBVersion is None:
		startVersion = getDBSchemaVersion()
	else:
		startVersion = forceDBVersion

	with base.getWritableAdminConn() as conn:
		for statement in iterStatements(startVersion, CURRENT_SCHEMAVERSION):
			if statement is _COMMIT:
				showProgress("> committing changes...")
				conn.commit()

			elif callable(statement):
				if statement.__doc__:
					showProgress("> %s..."%statement.__doc__)
				# if no docstring is present, we assume the function will output
				# custom user feedback
				statement(conn)

			else:
				showProgress("> "+getattr(statement, "annotation",
					"executing %s"%utils.makeEllipsis(statement, 60))+"... ")
				conn.execute(statement)

			showProgress(" ok\n")

	# other chores; add them below for the time being.
	clearThirdPartyCache()

################## misc chores
# if we get more of those, I guess we ought to have
# a module for those and report on them
def clearThirdPartyCache():
	"""removes all files from the third-party cache directory
	(ifpages.ThirdPartyCachePage.cacheDir).
	"""
	from gavo.web import ifpages

	tpcDir = ifpages.ThirdPartyCachePage.cacheDir
	if os.path.isdir(tpcDir):
		for fName in os.listdir(tpcDir):
			path = os.path.join(tpcDir, fName)
			if os.path.isfile(path):
				os.unlink(path)

def parseCommandLine():
	import argparse
	parser = argparse.ArgumentParser()
	parser.add_argument("--force-dbversion", help="assume this as the"
		" database's schema version. If you don't develop DaCHS, you"
		" almost certainly should stay clear of this flag",
		type=int, dest="forceDBVersion", default=None)
	parser.add_argument("-e", "--get-extension-script",
		help="Dump a script to update DaCHS-managed extensions (will"
		" print nothing if no extensions need updating). This will return"
		" 0 if material was written, 1 otherwise.",
		dest="dumpExtScript", action="store_true")
	parser.add_argument("-t", "--update-tap-schema",
		help="Force the re-generation of the TAP schema. This is basic"
		" housekeeping that's perhaps smart to do on the occasion of"
		" an upgrade.",
		dest="updateTAPSchema", action="store_true")
	return parser.parse_args()

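# Illustrative invocations, assuming this module is reached through the
# dachs CLI's upgrade subcommand; the psql pipe is just one way to apply
# the extension script, and the database name is yours to substitute:
#
#   dachs upgrade                       # bring the schema up to CURRENT_SCHEMAVERSION
#   dachs upgrade -e | psql <your-db>   # update DaCHS-managed postgres extensions
#   dachs upgrade -t                    # force a re-generation of TAP_SCHEMA
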
def main():
	args = parseCommandLine()

	if args.dumpExtScript:
		with base.getTableConn() as conn:
			sys.exit(dumpExtensionUpdater(conn))

	else:
		with base.ui.suspended("ScriptRunning"):
			upgrade(args.forceDBVersion)

		if args.updateTAPSchema:
			with base.getWritableAdminConn() as conn:
				_updateTAP_SCHEMA(conn)