Mark AS users with their AS's ID

Branch: pull/4/merge
Author: Daniel Wagner-Hall (9 years ago)
Parent: 7718303e71
Commit: 763360594d
Changed files (lines added + removed):

  1. scripts/synapse_port_db (9)
  2. synapse/app/homeserver.py (2)
  3. synapse/storage/appservice.py (34)
  4. synapse/storage/engines/__init__.py (5)
  5. synapse/storage/engines/postgres.py (5)
  6. synapse/storage/engines/sqlite3.py (5)
  7. synapse/storage/prepare_database.py (15)
  8. synapse/storage/schema/delta/30/as_users.py (59)
  9. tests/storage/test_base.py (3)
  10. tests/utils.py (20)

scripts/synapse_port_db
@@ -309,8 +309,8 @@ class Porter(object):
             **self.postgres_config["args"]
         )
 
-        sqlite_engine = create_engine("sqlite3")
-        postgres_engine = create_engine("psycopg2")
+        sqlite_engine = create_engine(FakeConfig(sqlite_config))
+        postgres_engine = create_engine(FakeConfig(postgres_config))
 
         self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
         self.postgres_store = Store(postgres_db_pool, postgres_engine)
@@ -792,3 +792,8 @@ if __name__ == "__main__":
     if end_error_exec_info:
         exc_type, exc_value, exc_traceback = end_error_exec_info
         traceback.print_exception(exc_type, exc_value, exc_traceback)
+
+
+class FakeConfig:
+    def __init__(self, database_config):
+        self.database_config = database_config
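
A quick sketch of why the FakeConfig shim above is enough for the port script: as the change to synapse/storage/engines/__init__.py below shows, create_engine() now takes a config object and only reads config.database_config["name"], so wrapping the per-database dicts the script already builds is sufficient. The dict contents here are hypothetical:

    # Hypothetical config dicts; the real ones come from the port script's arguments.
    sqlite_config = {"name": "sqlite3", "args": {"database": "homeserver.db"}}
    postgres_config = {"name": "psycopg2", "args": {"user": "synapse", "dbname": "synapse"}}

    sqlite_engine = create_engine(FakeConfig(sqlite_config))      # -> Sqlite3Engine
    postgres_engine = create_engine(FakeConfig(postgres_config))  # -> PostgresEngine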

synapse/app/homeserver.py
@@ -382,7 +382,7 @@ def setup(config_options):
     tls_server_context_factory = context_factory.ServerContextFactory(config)
 
-    database_engine = create_engine(config.database_config["name"])
+    database_engine = create_engine(config)
     config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
 
     hs = SynapseHomeServer(

synapse/storage/appservice.py
@@ -34,8 +34,8 @@ class ApplicationServiceStore(SQLBaseStore):
     def __init__(self, hs):
         super(ApplicationServiceStore, self).__init__(hs)
         self.hostname = hs.hostname
-        self.services_cache = []
-        self._populate_appservice_cache(
+        self.services_cache = ApplicationServiceStore.load_appservices(
+            hs.hostname,
             hs.config.app_service_config_files
         )
@@ -144,21 +144,23 @@ class ApplicationServiceStore(SQLBaseStore):
         return rooms_for_user_matching_user_id
 
-    def _load_appservice(self, as_info):
+    @classmethod
+    def _load_appservice(cls, hostname, as_info, config_filename):
         required_string_fields = [
-            # TODO: Add id here when it's stable to release
-            "url", "as_token", "hs_token", "sender_localpart"
+            "id", "url", "as_token", "hs_token", "sender_localpart"
         ]
         for field in required_string_fields:
             if not isinstance(as_info.get(field), basestring):
-                raise KeyError("Required string field: '%s'", field)
+                raise KeyError("Required string field: '%s' (%s)" % (
+                    field, config_filename,
+                ))
 
         localpart = as_info["sender_localpart"]
         if urllib.quote(localpart) != localpart:
             raise ValueError(
                 "sender_localpart needs characters which are not URL encoded."
             )
-        user = UserID(localpart, self.hostname)
+        user = UserID(localpart, hostname)
         user_id = user.to_string()
 
         # namespace checks
@@ -188,25 +190,30 @@ class ApplicationServiceStore(SQLBaseStore):
             namespaces=as_info["namespaces"],
             hs_token=as_info["hs_token"],
             sender=user_id,
-            id=as_info["id"] if "id" in as_info else as_info["as_token"],
+            id=as_info["id"],
         )
 
-    def _populate_appservice_cache(self, config_files):
-        """Populates a cache of Application Services from the config files."""
+    @classmethod
+    def load_appservices(cls, hostname, config_files):
+        """Returns a list of Application Services from the config files."""
         if not isinstance(config_files, list):
             logger.warning(
                 "Expected %s to be a list of AS config files.", config_files
             )
-            return
+            return []
 
         # Dicts of value -> filename
         seen_as_tokens = {}
         seen_ids = {}
 
+        appservices = []
+
         for config_file in config_files:
             try:
                 with open(config_file, 'r') as f:
-                    appservice = self._load_appservice(yaml.load(f))
+                    appservice = ApplicationServiceStore._load_appservice(
+                        hostname, yaml.load(f), config_file
+                    )
                     if appservice.id in seen_ids:
                         raise ConfigError(
                             "Cannot reuse ID across application services: "
@@ -226,11 +233,12 @@ class ApplicationServiceStore(SQLBaseStore):
                     )
                     seen_as_tokens[appservice.token] = config_file
                     logger.info("Loaded application service: %s", appservice)
-                    self.services_cache.append(appservice)
+                    appservices.append(appservice)
             except Exception as e:
                 logger.error("Failed to load appservice from '%s'", config_file)
                 logger.exception(e)
                 raise
+        return appservices
 
 
 class ApplicationServiceTransactionStore(SQLBaseStore):
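
Because _load_appservice and load_appservices are now classmethods that take the hostname and config file list explicitly, they can be used without a live store or database, which is what the schema delta in 30/as_users.py below relies on. A minimal standalone sketch (hostname and path are hypothetical):

    # Hypothetical standalone use of the new classmethod.
    appservices = ApplicationServiceStore.load_appservices(
        "example.com",                          # normally hs.hostname
        ["/etc/synapse/appservice-irc.yaml"],   # normally config.app_service_config_files
    )
    loaded_ids = [appservice.id for appservice in appservices]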

synapse/storage/engines/__init__.py
@@ -26,12 +26,13 @@ SUPPORTED_MODULE = {
 }
 
 
-def create_engine(name):
+def create_engine(config):
+    name = config.database_config["name"]
     engine_class = SUPPORTED_MODULE.get(name, None)
 
     if engine_class:
         module = importlib.import_module(name)
-        return engine_class(module)
+        return engine_class(module, config=config)
 
     raise RuntimeError(
         "Unsupported database engine '%s'" % (name,)

synapse/storage/engines/postgres.py
@@ -21,9 +21,10 @@ from ._base import IncorrectDatabaseSetup
 class PostgresEngine(object):
     single_threaded = False
 
-    def __init__(self, database_module):
+    def __init__(self, database_module, config):
         self.module = database_module
         self.module.extensions.register_type(self.module.extensions.UNICODE)
+        self.config = config
 
     def check_database(self, txn):
         txn.execute("SHOW SERVER_ENCODING")
@@ -44,7 +45,7 @@ class PostgresEngine(object):
         )
 
     def prepare_database(self, db_conn):
-        prepare_database(db_conn, self)
+        prepare_database(db_conn, self, config=self.config)
 
     def is_deadlock(self, error):
         if isinstance(error, self.module.DatabaseError):

synapse/storage/engines/sqlite3.py
@@ -23,8 +23,9 @@ import struct
 class Sqlite3Engine(object):
     single_threaded = True
 
-    def __init__(self, database_module):
+    def __init__(self, database_module, config):
         self.module = database_module
+        self.config = config
 
     def check_database(self, txn):
         pass
@@ -38,7 +39,7 @@ class Sqlite3Engine(object):
     def prepare_database(self, db_conn):
         prepare_sqlite3_database(db_conn)
-        prepare_database(db_conn, self)
+        prepare_database(db_conn, self, config=self.config)
 
     def is_deadlock(self, error):
         return False

synapse/storage/prepare_database.py
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
 
 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 29
+SCHEMA_VERSION = 30
 
 dir_path = os.path.abspath(os.path.dirname(__file__))
 
@@ -50,7 +50,7 @@ class UpgradeDatabaseException(PrepareDatabaseException):
     pass
 
 
-def prepare_database(db_conn, database_engine):
+def prepare_database(db_conn, database_engine, config):
     """Prepares a database for usage. Will either create all necessary tables
     or upgrade from an older schema version.
     """
@@ -61,10 +61,10 @@ def prepare_database(db_conn, database_engine):
         if version_info:
             user_version, delta_files, upgraded = version_info
             _upgrade_existing_database(
-                cur, user_version, delta_files, upgraded, database_engine
+                cur, user_version, delta_files, upgraded, database_engine, config
             )
         else:
-            _setup_new_database(cur, database_engine)
+            _setup_new_database(cur, database_engine, config)
 
         # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
 
@@ -75,7 +75,7 @@ def prepare_database(db_conn, database_engine):
         raise
 
 
-def _setup_new_database(cur, database_engine):
+def _setup_new_database(cur, database_engine, config):
     """Sets up the database by finding a base set of "full schemas" and then
     applying any necessary deltas.
 
@@ -148,11 +148,12 @@ def _setup_new_database(cur, database_engine):
         applied_delta_files=[],
         upgraded=False,
         database_engine=database_engine,
+        config=config,
     )
 
 
 def _upgrade_existing_database(cur, current_version, applied_delta_files,
-                               upgraded, database_engine):
+                               upgraded, database_engine, config):
     """Upgrades an existing database.
 
     Delta files can either be SQL stored in *.sql files, or python modules
@@ -245,7 +246,7 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
             module_name, absolute_path, python_file
         )
         logger.debug("Running script %s", relative_path)
-        module.run_upgrade(cur, database_engine)
+        module.run_upgrade(cur, database_engine, config=config)
     elif ext == ".pyc":
         # Sometimes .pyc files turn up anyway even though we've
         # disabled their generation; e.g. from distribution package
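
A brief usage sketch of how the config is threaded through the schema machinery (hypothetical call site; db_conn is an open DB-API connection): prepare_database() now requires the parsed config so that Python delta scripts, invoked as run_upgrade(cur, database_engine, config=config), can read application-service settings:

    # Hypothetical call site mirroring what the engines' prepare_database() methods do.
    engine = create_engine(config)             # config must expose database_config
    prepare_database(db_conn, engine, config)  # config reaches each python delta's run_upgrade()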

synapse/storage/schema/delta/30/as_users.py (new file)
@@ -0,0 +1,59 @@
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+from synapse.storage.appservice import ApplicationServiceStore
+
+
+logger = logging.getLogger(__name__)
+
+
+def run_upgrade(cur, database_engine, config, *args, **kwargs):
+    # NULL indicates user was not registered by an appservice.
+    cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT")
+
+    cur.execute("SELECT name FROM users")
+    rows = cur.fetchall()
+
+    config_files = []
+    try:
+        config_files = config.app_service_config_files
+    except AttributeError:
+        logger.warning("Could not get app_service_config_files from config")
+        pass
+
+    appservices = ApplicationServiceStore.load_appservices(
+        config.server_name, config_files
+    )
+
+    owned = {}
+
+    for row in rows:
+        user_id = row[0]
+        for appservice in appservices:
+            if appservice.is_exclusive_user(user_id):
+                if user_id in owned.keys():
+                    logger.error(
+                        "user_id %s was owned by more than one application"
+                        " service (IDs %s and %s); assigning arbitrarily to %s" %
+                        (user_id, owned[user_id], appservice.id, owned[user_id])
+                    )
+                owned[user_id] = appservice.id
+
+    for user_id, as_id in owned.items():
+        cur.execute(
+            database_engine.convert_param_style(
+                "UPDATE users SET appservice_id = ? WHERE name = ?"
+            ),
+            (as_id, user_id)
+        )

tests/storage/test_base.py
@@ -48,11 +48,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
 
         config = Mock()
         config.event_cache_size = 1
+        config.database_config = {"name": "sqlite3"}
         hs = HomeServer(
             "test",
             db_pool=self.db_pool,
             config=config,
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
         )
 
         self.datastore = SQLBaseStore(hs)

tests/utils.py
@@ -51,6 +51,8 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         config.server_name = "server.under.test"
         config.trusted_third_party_id_servers = []
 
+        config.database_config = {"name": "sqlite3"}
+
     if "clock" not in kargs:
         kargs["clock"] = MockClock()
 
@@ -60,7 +62,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         hs = HomeServer(
             name, db_pool=db_pool, config=config,
             version_string="Synapse/tests",
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
             get_db_conn=db_pool.get_db_conn,
             **kargs
         )
@@ -69,7 +71,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         hs = HomeServer(
             name, db_pool=None, datastore=datastore, config=config,
             version_string="Synapse/tests",
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
             **kargs
         )
@@ -277,18 +279,24 @@ class SQLiteMemoryDbPool(ConnectionPool, object):
             cp_max=1,
         )
 
+        self.config = Mock()
+        self.config.database_config = {"name": "sqlite3"}
+
     def prepare(self):
-        engine = create_engine("sqlite3")
+        engine = self.create_engine()
         return self.runWithConnection(
-            lambda conn: prepare_database(conn, engine)
+            lambda conn: prepare_database(conn, engine, self.config)
         )
 
     def get_db_conn(self):
         conn = self.connect()
-        engine = create_engine("sqlite3")
-        prepare_database(conn, engine)
+        engine = self.create_engine()
+        prepare_database(conn, engine, self.config)
         return conn
 
+    def create_engine(self):
+        return create_engine(self.config)
+
 
 class MemoryDataStore(object):
