Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __version__ = "$Revision: 1.127 $" 
  15  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  16  __license__ = 'GPL (details at http://www.gnu.org)' 
  17   
  18  ### imports ### 
  19  # stdlib 
  20  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging 
  21   
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27  from Gnumed.pycommon.gmTools import prompted_input 
  28   
  29  _log = logging.getLogger('gm.db') 
  30  _log.info(__version__) 
  31   
  32   
  33  # 3rd party 
  34  try: 
  35          import psycopg2 as dbapi 
  36  except ImportError: 
  37          _log.exception("Python database adapter psycopg2 not found.") 
  38          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  39          raise 
  40  ### imports ### 
  41   
  42   
_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))

# sanity-check the capabilities of the psycopg2 build we found,
# refusing to start on builds GNUmed cannot work with:
# - must be DB-API 2.0 compliant
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
# - module must at least be shareable between threads
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
# - queries are written with %(name)s substitution
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
# - the psycopg2 version tag is expected to advertise datetime ("dt"),
#   extensions ("ext") and v3 wire protocol ("pq3") support
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes
  68   
# =======================================================================
# module-level default settings
# =======================================================================
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None                 # default time zone for connections
_sql_set_timezone = None                        # SQL template used to set the zone on new connections
_timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None                             # DSN string, lazily built from _default_login
_default_login = None                           # gmLoginInfo.LoginInfo instance

postgresql_version_string = None
postgresql_version = None                       # accuracy: major.minor

__ro_conn_pool = None                           # read-only connection pool, created on demand

# may user be asked for login parameters when none are set ?
auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================
  91   
# md5 hash of the concatenated table structure expected for each
# released database schema version (see database_schema_compatible())
known_schema_hashes = {
	'devel': 'not released, testing only',
	'v2': 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	'v3': 'e73718eaf230d8f1d2d01afa8462e176',
	'v4': '4428ccf2e54c289136819e701bb095ea',
	'v5': '7e7b093af57aea48c288e76632a382e5',       # ... old (v1) style hashes
	'v6': '90e2026ac2efd236da9c8608b8685b2d',       # new (v2) style hashes ...
	'v7': '6c9f6d3981483f8e9433df99d1947b27',
	'v8': '89b13a7af83337c3aad153b717e52360',
	'v9': '641a9b2be3c378ffc2bb2f0b1c9f051d',
	'v10': '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	'v11': '03042ae24f3f92877d986fb0a6184d76',
	'v12': '06183a6616db62257e22814007a8ed07',
	'v13': 'fab7c1ae408a6530c47f9b5111a0841e',
	'v14': 'e170d543f067d1ea60bfe9076b1560cf',
	'v15': '70012ff960b77ecdff4981c94b5b55b6'
}
 109   
# reverse mapping of known_schema_hashes (sans 'devel'):
# schema structure hash -> version string
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 'v2',
	'e73718eaf230d8f1d2d01afa8462e176': 'v3',
	'4428ccf2e54c289136819e701bb095ea': 'v4',
	'7e7b093af57aea48c288e76632a382e5': 'v5',
	'90e2026ac2efd236da9c8608b8685b2d': 'v6',
	'6c9f6d3981483f8e9433df99d1947b27': 'v7',
	'89b13a7af83337c3aad153b717e52360': 'v8',
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 'v9',
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 'v10',
	'03042ae24f3f92877d986fb0a6184d76': 'v11',
	'06183a6616db62257e22814007a8ed07': 'v12',
	'fab7c1ae408a6530c47f9b5111a0841e': 'v13',
	'e170d543f067d1ea60bfe9076b1560cf': 'v14',
	'70012ff960b77ecdff4981c94b5b55b6': 'v15'
}
 126   
# client release branch -> database schema version it requires
map_client_branch2required_db_version = {
	u'GIT tree': u'devel',
	u'0.3': u'v9',
	u'0.4': u'v10',
	u'0.5': u'v11',
	u'0.6': u'v12',
	u'0.7': u'v13',
	u'0.8': u'v14',
	u'0.9': u'v15'
}
 137   
# get columns and data types for a given table
# (args: table_schema, table_name)
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# get column names only for a given table
# (args: table_schema, table_name)
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
 161   
 162  # ======================================================================= 
 163  # module globals API 
 164  # ======================================================================= 
def set_default_client_encoding(encoding = None):
	"""Set the default client (wire) encoding for new connections.

	Verifies that both psycopg2 and Python's <codecs> module can
	handle <encoding> before accepting it.

	Raises ValueError (unknown to psycopg2) or LookupError
	(unknown to Python). Returns True on success.
	"""
	# psycopg2 must know the encoding ...
	try:
		py_enc = psycopg2.extensions.encodings[encoding]
	except KeyError:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# ... and Python must know the codec psycopg2 maps it to
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
184 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
	"""Set the default time zone used for new connections.

	Note: the value is NOT validated against the database here.
	Returns True.
	"""
	# FIXME: use __validate
	global _default_client_timezone
	global _sql_set_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone
	_sql_set_timezone = u'set timezone to %s'
	return True
196 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
	"""Check whether <timezone> can be set AND used on <conn>.

	Leaves the connection in a clean state (commit before,
	rollback after). Returns True/False.
	"""
	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		# PostgreSQL rejected the zone name itself
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return is_valid
229 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""Expand a timezone abbreviation into a full zone name.

	Some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations.
	Returns the expansion, or <timezone> unchanged when no
	expansion is found (or the lookup fails).
	"""
	# restrict to proper "Region/City" names, excluding Etc/*
	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
261 #---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""Detect the client timezone and remember it module-globally.

	This is run on the very first connection. Candidates are
	taken from $TZ and from gmDateTime, validated against the
	database, falling back to the numeric ISO offset.
	"""
	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	try:
		# Python 2: $TZ is a byte string, decode with the system encoding
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		# no name validated - use the numeric UTC offset, eg "+01:00"
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
307 # ======================================================================= 308 # login API 309 # =======================================================================
def __request_login_params_tui():
	"""Text mode request of database login parameters.

	Returns a gmLoginInfo.LoginInfo instance.
	Raises gmExceptions.ConnectionError if the user cancels (Ctrl-C).
	"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = 'gnumed_v15')
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		# getpass keeps the password off the screen
		login.password = getpass.getpass(tmp)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
329 #---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object.
	Raises gmExceptions.NoGuiError when no wx App is running,
	gmExceptions.ConnectionError when the user cancels.
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog
	# if wx was not initialized /no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	#if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
354 #---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection.

	Tries the wxPython GUI dialog first (only when a display
	seems available), falling back to text mode on any GUI
	failure.

	Returns a gmLoginInfo.LoginInfo instance.
	Raises Exception if auto-requesting login parameters is disabled.
	"""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't wxGTK will crash hard at
	#  C-level with "can't open Display")
	# note: dict.has_key() is Python-2-only, "in" is equivalent
	if 'DISPLAY' in os.environ:
		# try wxPython GUI
		try:
			return __request_login_params_gui_wx()
		except Exception:
			# deliberate best-effort: any GUI problem (wx not
			# installed, no App, user cancelled) means falling
			# back to the console dialog below; narrowed from a
			# bare "except:" so Ctrl-C/SystemExit still propagate
			pass

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()
372 373 # ======================================================================= 374 # DSN API 375 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a libpq-style DSN string from the given parts.

	Parts that are None or blank are omitted; sslmode=prefer
	is always appended.
	"""
	candidates = [
		('dbname', database),
		('host', host),
		('port', port),
		('user', user),
		('password', password)
	]
	dsn_parts = [
		'%s=%s' % (key, value)
		for key, value in candidates
		if (value is not None) and (str(value).strip() != '')
	]
	dsn_parts.append('sslmode=prefer')
	return ' '.join(dsn_parts)
397 # ------------------------------------------------------
def get_default_login():
	"""Return the default login, requesting one from the user if needed."""
	# accessing the default DSN guarantees a login exists
	get_default_dsn()
	return _default_login
402 # ------------------------------------------------------
def get_default_dsn():
	"""Return the default DSN, asking for login parameters if unset."""
	global _default_dsn
	if _default_dsn is None:
		# no DSN yet - interactively request login parameters
		set_default_login(login = request_login_params())
	return _default_dsn
412 # ------------------------------------------------------
def set_default_login(login=None):
	"""Remember <login> as the default and derive the default DSN from it.

	A blank host is normalized to None (non-TCP/IP connection).
	Returns False when login is None, True otherwise.
	"""
	if login is None:
		return False

	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	# FIX: log BEFORE reassigning the globals - the original code
	# assigned first, so "from [...] to [...]" always showed the
	# new value twice
	global _default_login
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	_log.info('setting default DSN from [%s] to [%s]' % (_default_dsn, dsn))
	_default_dsn = dsn

	return True
432 # ======================================================================= 433 # netadata API 434 # =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	"""Check whether the connected database schema matches <version>.

	Compares the md5 of the concatenated table structure against
	the hash recorded in known_schema_hashes.

	verbose: on mismatch also dump schema structure and revision
	history into the log
	Returns True/False.
	"""
	expected_hash = known_schema_hashes[version]
	# the 'devel' schema is always numbered 9999 in the database
	if version == 'devel':
		args = {'ver': '9999'}
	else:
		args = {'ver': version.strip('v')}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj=link_obj).split():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj=link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
462 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	"""Map the current database schema hash to a known version string."""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	md5_sum = rows[0]['md5']
	version = map_schema_hash2version.get(md5_sum)
	if version is None:
		return u'unknown database schema version, MD5 hash is [%s]' % md5_sum
	return version
469 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	"""Return the concatenated table structure of the database as one string."""
	query = {'cmd': u'select gm.concat_table_structure()'}
	rows, col_idx = run_ro_queries(link_obj=link_obj, queries = [query])
	return rows[0][0]
473 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	"""Return rows (imported, version, filename) from gm.schema_revision, oldest first."""
	cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
	return rows
485 #------------------------------------------------------------------------
def get_current_user():
	"""Return the database account we are currently connected as."""
	rows, col_idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return rows[0][0]
489 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.

	Returns rows describing each referencing constraint.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
541 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table> (via PostgreSQL inheritance).

	Returns rows (namespace, table).
	"""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows
563 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Return True when base table <schema>.<table> exists, False otherwise."""
	query = {
		'cmd': u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)""",
		'args': (schema, table)
	}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [query])
	return rows[0][0]
576 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
	"""Map column names of the last query on <cursor> to positional indices.

	Returns {column name: index} or None when the cursor has no
	result description (unused cursor / non-SELECT statement).
	"""
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	# FIX: dict.has_key() is Python-2-only; "in" is equivalent there
	# and keeps the code Python-3 ready; enumerate replaces the
	# manual counter
	for col_index, col_desc in enumerate(cursor.description):
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
	return col_indices
594 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [[column names], {column name: type}] for <schema>.<table>."""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		name = row[0]
		udt = row[1]
		col_names.append(name)
		# array types are reported as "_<base type>", map to "<base type>[]"
		if udt.startswith('_'):
			col_type[name] = udt[1:] + '[]'
		else:
			col_type[name] = udt
	return [col_names, col_type]
610 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return the column names of <schema>.<table> in ordinal order."""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	return [ row[0] for row in rows ]
618 619 #------------------------------------------------------------------------ 620 # i18n functions 621 #------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	"""Export all database string translations into SQL script <filename>.

	The resulting script re-imports the strings via i18n.upd_tx()
	and is intended to be emailed to the GNUmed developers.
	Returns True.
	"""
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off\n\n')
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		# escape embedded single quotes for the E'...' literals
		line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	tx_file.write(u'\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
647 #------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	"""Delete the translation of <original> into <language>. Always returns True."""
	query = {
		'cmd': u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s',
		'args': {'lang': language, 'orig': original}
	}
	run_rw_queries(link_obj = link_obj, queries = [query], return_data = False, end_tx = True)
	return True
653 654 #------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	"""Insert or update the translation of <original> into <language>.

	Returns the args dict handed to the query.
	"""
	args = {'lang': language, 'orig': original, 'trans': translation}
	query = {'cmd': u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)', 'args': args}
	run_rw_queries(queries = [query], return_data = False)
	return args
660 661 #------------------------------------------------------------------------
def get_translation_languages():
	"""Return the list of languages any translations exist for."""
	rows, col_idx = run_ro_queries (
		queries = [{'cmd': u'select distinct lang from i18n.translations'}]
	)
	return [ row[0] for row in rows ]
667 668 #------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
	"""Return translatable strings (lang, orig, trans) from the database.

	language: None means return strings for all languages
	order_by: ORDER BY column list; None means "lang, orig"
	"""
	args = {'lang': language}
	_log.debug('language [%s]', language)

	# FIX: this condition was inverted - a caller-supplied order_by
	# used to be discarded (and None formatted into "ORDER BY None")
	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows
740 741 #------------------------------------------------------------------------
def get_current_user_language():
	"""Return the i18n language configured for the current db user."""
	rows, col_idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
	return rows[0][0]
746 747 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset

	Returns the boolean result of the (last) query.
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		# unset_curr_lang() presumably returns no usable boolean,
		# so append a query that yields the True checked below
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
779 #------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)
	query = {
		'cmd': u'select i18n.force_curr_lang(%(lang)s)',
		'args': {'lang': language}
	}
	run_rw_queries(queries = [query])
792 #------------------------------------------------------------------------ 793 #------------------------------------------------------------------------ 794 text_expansion_keywords = None 795
def get_text_expansion_keywords():
	"""Return all keyword expansions, loading and caching them on first use."""
	global text_expansion_keywords
	if text_expansion_keywords is None:
		cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
		text_expansion_keywords = rows
		_log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
	return text_expansion_keywords
808 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
	"""Return the current user's expansion for <keyword>, or None."""
	# Easter Egg ;-)
	if keyword == u'$$steffi':
		return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

	cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if rows:
		return rows[0]['expansion']
	return None
822 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
	"""Return known expansion keywords starting with <keyword>."""
	if keyword is None:
		return []
	# make sure the cache is populated
	get_text_expansion_keywords()
	return [
		kwd['keyword']
		for kwd in text_expansion_keywords
		if kwd['keyword'].startswith(keyword)
	]
836 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
	"""Add a text expansion for <keyword>.

	public: True stores a public (staff-less) expansion, otherwise
	a private one for the current staff member.

	Returns False when such an expansion already exists, True after
	inserting (the module-level cache is invalidated).
	"""
	# already exists ?
	if public:
		cmd = u"select 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
	else:
		cmd = u"select 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) != 0:
		return False

	if public:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
	else:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

	rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

	# invalidate the cache
	global text_expansion_keywords
	text_expansion_keywords = None

	return True
863 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
	"""Delete the expansion for <keyword> owned by the current user."""
	cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (select pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
	run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

	# invalidate the module-level keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
876 #------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):
	"""Replace the current user's expansion for <keyword> (delete + insert)."""
	queries = [
		{'cmd': u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and
	fk_staff = (select pk from dem.staff where db_user = current_user)""",
		 'args': {'kwd': keyword}},
		{'cmd': u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))""",
		 'args': {'kwd': keyword, 'exp': expansion}},
	]
	run_rw_queries(queries = queries)

	# invalidate the module-level keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
895 # ======================================================================= 896 # query runners and helpers 897 # =======================================================================
def send_maintenance_notification():
	"""Broadcast an impending-maintenance warning to all listening clients."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
		return_data = False
	)
901 #------------------------------------------------------------------------
def send_maintenance_shutdown():
	"""Tell all listening clients to disconnect for database maintenance."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
		return_data = False
	)
905 #------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	"""Check whether <candidate> can be cast to a PostgreSQL interval.

	Tries a direct ::interval cast first, then falls back to a
	::text::interval cast (needed for candidates which PG will not
	cast directly). Returns True/False.

	Previously used bare except: clauses which also swallowed
	KeyboardInterrupt/SystemExit - narrowed to Exception.
	"""
	cmd = u'select %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except Exception:
		# direct cast failed, retry by going through text
		cmd = u'select %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except Exception:
			return False
918 #------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store bytea data into the file <filename>.

	Thin wrapper around bytea2file_object() - see there for the
	meaning of the query arguments.

	Returns the result of bytea2file_object().
	"""
	outfile = open(filename, 'wb')		# open() rather than deprecated file()
	try:
		result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	finally:
		# previously the file handle leaked if bytea2file_object() raised
		outfile.close()
	return result
924 #------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file_obj>
	- must be a file like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None

	Returns True on success.
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	# NOTE(review): the comment above says read-write but readonly=True
	# is passed -- get_raw_connection() returns a fresh, non-pooled
	# connection either way; verify intent
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
		if data_size in [None, 0]:
			conn.rollback()
			return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk size of 0 means "retrieve whole field at once"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		# (typo "escacpe" fixed in this log message)
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# read-only work only - roll back to release the transaction
	conn.rollback()
	return True
1024 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea

	<conn> - if None a private read-write connection is created
	and closed again; otherwise the caller's connection is used
	and left open.
	"""
	# read data from file
	infile = open(filename, "rb")		# open() rather than deprecated file()
	try:
		data_as_byte_string = infile.read()
	finally:
		# previously the file handle leaked if read() raised
		infile.close()
	if args is None:
		args = {}
	# buffer() makes psycopg2 treat the payload as binary
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	try:
		run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
	finally:
		# previously a self-created connection leaked if the query raised
		if close_conn:
			conn.close()

	return
1056 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: additionally escape the remaining ARE metacharacters
		      (?, {, }, ^, $, |) - previously documented but unimplemented
		False: only escape those which render the regex invalid
	"""
	# baseline: metacharacters which can render the regex invalid
	expression = expression.replace (
		'(', '\('
	).replace (
		')', '\)'
	).replace (
		'[', '\['
	).replace (
		'+', '\+'
	).replace (
		'.', '\.'
	).replace (
		'*', '\*'
	)
	#']', '\]',			# not needed

	if not escape_all:
		return expression

	# escape the remaining metacharacters, too; the backslash itself is
	# deliberately left alone so the escapes added above survive
	for meta in ('?', '{', '}', '^', '$', '|'):
		expression = expression.replace(meta, '\\' + meta)
	return expression
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<link_obj>
	- a cursor (caller manages cursor and transaction),
	- a connection (a cursor is created here, the tx rolled back), or
	- None (a pooled read-only connection is obtained here)
	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	<return_data> - if True, fetchall() the last query's rows
	<get_col_idx> - if True, also return a {column name: position} map

	Returns (data, col_idx); either element may be None.
	"""
	# set up close/rollback callables depending on what was passed in,
	# so the cleanup code below is uniform
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		# queries are expected to be unicode - warn loudly if not
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except:
			# FIXME: use .pgcode
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			_log.error('query failed: [%s]', curs.query)
			_log.error('PG status message: %s', curs.statusmessage)
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"select currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	# set up commit/rollback/close callables depending on what was
	# passed in, so the cleanup code below is uniform
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		conn_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			conn_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			conn_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		conn_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		# queries are expected to be unicode - warn loudly if not
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	# finalize: no-ops unless we own the transaction (see above)
	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
1269 #------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generate and run SQL for an INSERT query.

	values: dict of values keyed by field to insert them into
	returning: optional list of field names for a RETURNING clause

	Returns the (data, idx) tuple from run_rw_queries().
	"""
	if schema is None:
		schema = u'public'

	# build field list and matching placeholder list from the same
	# key sequence so both always line up
	fields = values.keys()
	val_snippets = [ u'%%(%s)s' % field for field in fields ]

	if returning is None:
		returning = u''
		return_data = False
	else:
		returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
		return_data = True

	cmd = u"""\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)
1312 # ======================================================================= 1313 # connection handling API 1314 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""
	GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):
		"""Create one pooled, read-only connection (overrides base class)."""
		# dsn/verbose were handed to the pool constructor as keyword args
		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

		# make close() on a pooled read-only connection a hard error,
		# keeping the real close() around for shutdown()
		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close

		# bookkeeping mirroring the psycopg2 base class pool
		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		"""Really close all connections currently checked out of the pool."""
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
1343 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc

	Raises cAuthenticationError on recognizable authentication
	failures, re-raises other OperationalErrors unchanged.
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	# the public demo database moved hosts - fail early with a clear message
	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		# heuristics to detect authentication-related failures,
		# re-raised with the original traceback preserved
		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff (only on the very first connection)
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		# "major.minor" part of server_version, as numeric
		curs.execute ("""
			select
				(split_part(setting, '.', 1) || '.' || split_part(setting, '.', 2))::numeric as version
			from pg_settings
			where name='server_version'"""
		)
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("select pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			# size logging is best-effort only
			pass
		if verbose:
			__log_PG_settings(curs=curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode for this session
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	# mark as not yet having encoding/timezone/etc applied,
	# get_connection() checks this flag
	conn.is_decorated = False

	return conn
1431 # =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.

	Decorates the raw connection with client encoding, isolation
	level, time zone, datestyle and bytea_output settings.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			# dsn is None here so the pool falls back to the default DSN
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		# NOTE(review): <readonly> is not forwarded here - a non-pooled
		# readonly request gets a READ WRITE raw connection; verify intent
		conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

	# already decorated (eg. re-used from the pool) - nothing more to do
	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	if readonly:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s], datestyle [ISO], sql_inheritance [ON]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	# - datestyle
	# regarding DMY/YMD handling: since we force *input* to
	# ISO, too, the DMY/YMD setting is not needed
	cmd = "set datestyle to 'ISO'"
	curs.execute(cmd)

	# - SQL inheritance mode
	cmd = 'set sql_inheritance to on'
	curs.execute(cmd)

	# - version string (cached module-wide on first connect)
	global postgresql_version_string
	if postgresql_version_string is None:
		curs.execute('select version()')
		postgresql_version_string = curs.fetchone()['version']
		_log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	conn.is_decorated = True

	return conn
1531 #-----------------------------------------------------------------------
def shutdown():
	"""Close all pooled read-only connections, if a pool was ever created."""
	if __ro_conn_pool is not None:
		__ro_conn_pool.shutdown()
1536 # ====================================================================== 1537 # internal helpers 1538 #-----------------------------------------------------------------------
def __noop():
	"""Do nothing - stand-in for close()/commit()/rollback() where inapplicable."""
	return None
1541 #-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
	"""Patched in as .close() on pooled read-only connections - always raises."""
	raise TypeError(u'close() called on read-only connection')
1544 #-----------------------------------------------------------------------
def log_database_access(action=None):
	"""Record <action> in the gm.access_log audit table."""
	entry = {u'user_action': action}
	run_insert (
		schema = u'gm',
		table = u'access_log',
		values = entry,
		end_tx = True
	)
1552 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
	"""Check server time and local time to be within
	the given tolerance of each other.

	tolerance: seconds

	Returns False when the skew (or the wire roundtrip itself)
	exceeds the tolerance, True otherwise.
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	conn = get_raw_connection(readonly=True)
	curs = conn.cursor()

	# time the query so we know how much of the difference
	# could be mere network latency
	started = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': u"select now() at time zone 'UTC'"}])
	ended = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	server_now_as_utc = rows[0][0]
	query_duration = ended - started
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	real_skew = abs(server_now_as_utc - client_now_as_utc)
	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True
1595 #-----------------------------------------------------------------------
def sanity_check_database_settings():
	"""Checks database settings.

	returns (status, message)
	status:
		0: no problem
		1: non-fatal problem
		2: fatal problem
	"""
	_log.debug('checking database settings')
	settings = {
		# setting: [expected value, risk, fatal?]
		# fatal? semantics: True = fatal, False = non-fatal,
		# None = logged/reported but not counted as a problem
		u'allow_system_table_mods': [u'off', u'system breakage', False],
		u'check_function_bodies': [u'on', u'suboptimal error detection', False],
		u'default_transaction_read_only': [u'on', u'accidental database writes', False],
		u'fsync': [u'on', u'data loss/corruption', True],
		u'full_page_writes': [u'on', u'data loss/corruption', False],
		u'lc_messages': [u'C', u'suboptimal error detection', False],
		u'password_encryption': [u'on', u'breach of confidentiality', False],
		#u'regex_flavor': [u'advanced', u'query breakage', False],		# 9.0 doesn't support this anymore, default now advanced anyway
		u'synchronous_commit': [u'on', u'data loss/corruption', False],
		u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
	}

	# connection logging is only *fatal* when HIPAA compliance is requested
	from Gnumed.pycommon import gmCfg2
	_cfg = gmCfg2.gmCfgData()
	if _cfg.get(option = u'hipaa'):
		settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
		settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
	else:
		settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
		settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

	cmd = u"select name, setting from pg_settings where name in %(settings)s"
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'settings': tuple(settings.keys())}}])

	found_error = False
	found_problem = False
	msg = []
	for row in rows:
		if row[1] != settings[row[0]][0]:
			if settings[row[0]][2] is True:
				found_error = True
			elif settings[row[0]][2] is False:
				found_problem = True
			elif settings[row[0]][2] is None:
				# mismatch noted in the message but does not affect status
				pass
			else:
				_log.error(settings[row[0]])
				raise ValueError(u'invalid database configuration sanity check')
			msg.append(_(' option [%s]: %s') % (row[0], row[1]))
			msg.append(_(' risk: %s') % settings[row[0]][1])
			_log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (row[0], row[1], settings[row[0]][0], settings[row[0]][1]))

	if found_error:
		return 2, u'\n'.join(msg)

	if found_problem:
		return 1, u'\n'.join(msg)

	return 0, u''
1657 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
	"""Log all PostgreSQL runtime settings via the given cursor.

	Does not use any of the run_*() helpers since those might
	create a loop if connection setup itself fails here.

	Returns True on success, False otherwise.
	"""
	# FIXME: use pg_settings
	try:
		curs.execute(u'show all')
	except Exception:
		# was a bare except: - narrowed so KeyboardInterrupt/SystemExit pass through
		_log.exception(u'cannot log PG settings (>>>show all<<< failed)')
		return False
	settings = curs.fetchall()
	if settings is None:
		_log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
		return False
	for setting in settings:
		_log.debug(u'PG option [%s]: %s', setting[0], setting[1])
	return True
1674 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
	"""Extract the primary message from a PG/psycopg2 exception as unicode.

	Returns a fallback text when no message can be extracted.
	"""
	try:
		raw_msg = exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return u'cannot extract message from exception'

	# decode using the current client encoding, replacing undecodable bytes
	return unicode(raw_msg, gmI18N.get_encoding(), 'replace')
1683 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
	"""Raised when connecting fails for authentication-related reasons.

	Carries the DSN used and the original PG error message.
	"""

	def __init__(self, dsn=None, prev_val=None):
		# dsn: the DSN the connection attempt used
		# prev_val: the original PostgreSQL error message
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
		_log.error(tmp)
		# byte string expected from __str__ under Python 2
		return tmp.encode(gmI18N.get_encoding(), 'replace')

	def __unicode__(self):
		return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1698 1699 # ======================================================================= 1700 # custom psycopg2 extensions 1701 # =======================================================================
class cEncodingError(dbapi.OperationalError):
	"""Raised when the requested client encoding cannot be set.

	Carries the offending encoding and the original error message.
	"""

	def __init__(self, encoding=None, prev_val=None):
		# encoding: the client encoding that could not be set
		# prev_val: the original error message
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		# byte string expected from __str__ under Python 2
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

	def __unicode__(self):
		return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1714 1715 # ----------------------------------------------------------------------- 1716 # Python -> PostgreSQL 1717 # ----------------------------------------------------------------------- 1718 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
class cAdapterPyDateTime(object):
	"""psycopg2 adapter turning datetime.datetime into a PG timestamp literal."""

	def __init__(self, dt):
		# naive datetimes are rejected: without a tz the timestamp is ambiguous
		if dt.tzinfo is None:
			# NOTE(review): the chained %-formatting first fills [%s] with
			# _timestamp_template, then formats *that* with the ISO string;
			# verify _timestamp_template contains exactly one %s placeholder
			raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		# the SQL literal psycopg2 splices into the query text
		return _timestamp_template % self.__dt.isoformat()
# remove for 0.9
# ----------------------------------------------------------------------
#class cAdapterMxDateTime(object):
#
#	def __init__(self, dt):
#		if dt.tz == '???':
#			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
#		self.__dt = dt
#
#	def getquoted(self):
#		# under some locale settings the mx.DateTime ISO formatter
#		# will insert "," into the ISO string,
#		# while this is allowed per the ISO8601 spec PostgreSQL
#		# cannot currently handle that,
#		# so map those "," to "." to make things work:
#		return mxDT.ISO.str(self.__dt).replace(',', '.')
#
# ----------------------------------------------------------------------
# PostgreSQL -> Python
# ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2 becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

# remove for 0.9
try:
	import mx.DateTime as mxDT
#	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
except ImportError:
	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY()..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

	# embedded tests run only when explicitly requested:
	#   python gmPG2.py test [args...]
	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#--------------------------------------------------------------------
1785 - def test_file2bytea():
1786 run_rw_queries(queries = [ 1787 {'cmd': u'create table test_bytea (data bytea)'} 1788 ]) 1789 1790 cmd = u'insert into test_bytea values (%(data)s::bytea)' 1791 try: 1792 file2bytea(query = cmd, filename = sys.argv[2]) 1793 except: 1794 _log.exception('error') 1795 1796 run_rw_queries(queries = [ 1797 {'cmd': u'drop table test_bytea'} 1798 ])
1799 #--------------------------------------------------------------------
1800 - def test_get_connection():
1801 print "testing get_connection()" 1802 1803 dsn = 'foo' 1804 try: 1805 conn = get_connection(dsn=dsn) 1806 except dbapi.OperationalError, e: 1807 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1808 t, v = sys.exc_info()[:2] 1809 print ' ', t 1810 print ' ', v 1811 1812 dsn = 'dbname=gnumed_v9' 1813 try: 1814 conn = get_connection(dsn=dsn) 1815 except cAuthenticationError: 1816 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1817 t, v = sys.exc_info()[:2] 1818 print ' ', t 1819 print ' ', v 1820 1821 dsn = 'dbname=gnumed_v9 user=abc' 1822 try: 1823 conn = get_connection(dsn=dsn) 1824 except cAuthenticationError: 1825 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1826 t, v = sys.exc_info()[:2] 1827 print ' ', t 1828 print ' ', v 1829 1830 dsn = 'dbname=gnumed_v9 user=any-doc' 1831 try: 1832 conn = get_connection(dsn=dsn) 1833 except cAuthenticationError: 1834 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1835 t, v = sys.exc_info()[:2] 1836 print ' ', t 1837 print ' ', v 1838 1839 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1840 try: 1841 conn = get_connection(dsn=dsn) 1842 except cAuthenticationError: 1843 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1844 t, v = sys.exc_info()[:2] 1845 print ' ', t 1846 print ' ', v 1847 1848 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1849 conn = get_connection(dsn=dsn, readonly=True) 1850 1851 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1852 conn = get_connection(dsn=dsn, readonly=False) 1853 1854 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1855 encoding = 'foo' 1856 try: 1857 conn = get_connection(dsn=dsn, encoding=encoding) 1858 except cEncodingError: 1859 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1860 t, v = sys.exc_info()[:2] 1861 print ' ', t 1862 print ' ', v
1863 #--------------------------------------------------------------------
1864 - def test_exceptions():
1865 print "testing exceptions" 1866 1867 try: 1868 raise cAuthenticationError('no dsn', 'no previous exception') 1869 except cAuthenticationError: 1870 t, v, tb = sys.exc_info() 1871 print t 1872 print v 1873 print tb 1874 1875 try: 1876 raise cEncodingError('no dsn', 'no previous exception') 1877 except cEncodingError: 1878 t, v, tb = sys.exc_info() 1879 print t 1880 print v 1881 print tb
1882 #--------------------------------------------------------------------
	def test_ro_queries():
		# exercise run_ro_queries() with both a connection and a cursor
		# as link_obj, then verify that broken SQL raises ProgrammingError
		print "testing run_ro_queries()"

		dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
		conn = get_connection(dsn, readonly=True)

		# link_obj = connection
		data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
		print data
		print idx
		data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True)
		print data
		print idx

		curs = conn.cursor()

		# link_obj = cursor
		data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
		print data
		print idx

		data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True, verbose=True)
		print data
		print idx

		# invalid SQL ("selec") must fail with ProgrammingError
		try:
			data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
			print data
			print idx
		except psycopg2.ProgrammingError:
			print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		curs.close()
1917 #--------------------------------------------------------------------
1918 - def test_request_dsn():
1919 conn = get_connection() 1920 print conn 1921 conn.close()
1922 #--------------------------------------------------------------------
1923 - def test_set_encoding():
1924 print "testing set_default_client_encoding()" 1925 1926 enc = 'foo' 1927 try: 1928 set_default_client_encoding(enc) 1929 print "SUCCESS: encoding [%s] worked" % enc 1930 except ValueError: 1931 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1932 t, v = sys.exc_info()[:2] 1933 print ' ', t 1934 print ' ', v 1935 1936 enc = '' 1937 try: 1938 set_default_client_encoding(enc) 1939 print "SUCCESS: encoding [%s] worked" % enc 1940 except ValueError: 1941 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1942 t, v = sys.exc_info()[:2] 1943 print ' ', t 1944 print ' ', v 1945 1946 enc = 'latin1' 1947 try: 1948 set_default_client_encoding(enc) 1949 print "SUCCESS: encoding [%s] worked" % enc 1950 except ValueError: 1951 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1952 t, v = sys.exc_info()[:2] 1953 print ' ', t 1954 print ' ', v 1955 1956 enc = 'utf8' 1957 try: 1958 set_default_client_encoding(enc) 1959 print "SUCCESS: encoding [%s] worked" % enc 1960 except ValueError: 1961 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1962 t, v = sys.exc_info()[:2] 1963 print ' ', t 1964 print ' ', v 1965 1966 enc = 'unicode' 1967 try: 1968 set_default_client_encoding(enc) 1969 print "SUCCESS: encoding [%s] worked" % enc 1970 except ValueError: 1971 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1972 t, v = sys.exc_info()[:2] 1973 print ' ', t 1974 print ' ', v 1975 1976 enc = 'UNICODE' 1977 try: 1978 set_default_client_encoding(enc) 1979 print "SUCCESS: encoding [%s] worked" % enc 1980 except ValueError: 1981 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1982 t, v = sys.exc_info()[:2] 1983 print ' ', t 1984 print ' ', v
1985 #--------------------------------------------------------------------
1986 - def test_connection_pool():
1987 dsn = get_default_dsn() 1988 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 1989 print pool 1990 print pool.getconn() 1991 print pool.getconn() 1992 print pool.getconn() 1993 print type(pool.getconn())
1994 #--------------------------------------------------------------------
1995 - def test_list_args():
1996 dsn = get_default_dsn() 1997 conn = get_connection(dsn, readonly=True) 1998 curs = conn.cursor() 1999 curs.execute('select * from clin.clin_narrative where narrative = %s', ['a'])
2000 #--------------------------------------------------------------------
2001 - def test_sanitize_pg_regex():
2002 tests = [ 2003 ['(', '\\('] 2004 , ['[', '\\['] 2005 , [')', '\\)'] 2006 ] 2007 for test in tests: 2008 result = sanitize_pg_regex(test[0]) 2009 if result != test[1]: 2010 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2011 #--------------------------------------------------------------------
2012 - def test_is_pg_interval():
2013 status = True 2014 tests = [ 2015 [None, True], # None == NULL == succeeds ! 2016 [1, True], 2017 ['1', True], 2018 ['abc', False] 2019 ] 2020 2021 if not is_pg_interval(): 2022 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2023 status = False 2024 2025 for test in tests: 2026 result = is_pg_interval(test[0]) 2027 if result != test[1]: 2028 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2029 status = False 2030 2031 return status
2032 #--------------------------------------------------------------------
	def test_sanity_check_time_skew():
		# thin wrapper: delegates entirely to the module-level
		# sanity_check_time_skew() (defined elsewhere in this file)
		sanity_check_time_skew()
2035 #--------------------------------------------------------------------
	def test_keyword_expansion():
		# first call hits the database, second call should come from
		# the cache - presumably; confirm against get_text_expansion_keywords()
		print "keywords, from database:"
		print get_text_expansion_keywords()
		print "keywords, cached:"
		print get_text_expansion_keywords()
		# expand a sample keyword
		print "'$keyword' expands to:"
		print expand_keyword(keyword = u'$dvt')
2043 #--------------------------------------------------------------------
2044 - def test_get_foreign_key_details():
2045 for row in get_foreign_keys2column ( 2046 schema = u'dem', 2047 table = u'identity', 2048 column = u'pk' 2049 ): 2050 print '%s.%s references %s.%s.%s' % ( 2051 row['referencing_table'], 2052 row['referencing_column'], 2053 row['referenced_schema'], 2054 row['referenced_table'], 2055 row['referenced_column'] 2056 )
2057 #--------------------------------------------------------------------
	def test_set_user_language():
		# each test case: (user, language, expected result)
		# an expected result of None means a psycopg2.IntegrityError is
		# expected instead of a return value
		# (the original header comment claimed 4-tuples - the cases are 3-tuples)
		tests = [
			# current user
			[None, 'de_DE', True],
			[None, 'lang_w/o_tx', False],
			[None, None, True],
			# valid user
			['any-doc', 'de_DE', True],
			['any-doc', 'lang_w/o_tx', False],
			['any-doc', None, True],
			# invalid user
			['invalid user', 'de_DE', None],
			['invalid user', 'lang_w/o_tx', False],	# lang checking happens before user checking
			['invalid user', None, True]
		]
		for test in tests:
			try:
				result = set_user_language(user = test[0], language = test[1])
				if result != test[2]:
					print "test:", test
					print "result:", result, "expected:", test[2]
			except psycopg2.IntegrityError, e:
				if test[2] is None:
					# the exception WAS expected for this case
					continue
				# reaching here means the exception was NOT expected
				print "test:", test
				print "expected exception"
				print "result:", e
2086 #--------------------------------------------------------------------
2087 - def test_get_schema_revision_history():
2088 for line in get_schema_revision_history(): 2089 print u' - '.join(line)
2090 #--------------------------------------------------------------------
2091 - def test_run_query():
2092 gmDateTime.init() 2093 args = {'dt': gmDateTime.pydt_max_here()} 2094 cmd = u"select %(dt)s" 2095 2096 #cmd = u"select 'infinity'::timestamp with time zone" 2097 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2098 print rows
2099 #-------------------------------------------------------------------- 2100 # run tests 2101 #test_file2bytea() 2102 #test_get_connection() 2103 #test_exceptions() 2104 #test_ro_queries() 2105 #test_request_dsn() 2106 #test_set_encoding() 2107 #test_connection_pool() 2108 #test_list_args() 2109 #test_sanitize_pg_regex() 2110 #test_is_pg_interval() 2111 #test_sanity_check_time_skew() 2112 #test_keyword_expansion() 2113 #test_get_foreign_key_details() 2114 #test_set_user_language() 2115 #test_get_schema_revision_history() 2116 test_run_query() 2117 2118 # ====================================================================== 2119