Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | [no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  15  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  16   
  17  # stdlib 
  18  import time 
  19  import sys 
  20  import os 
  21  import codecs 
  22  import types 
  23  import logging 
  24  import datetime as pydt 
  25  import re as regex 
  26   
  27   
  28  # GNUmed 
  29  if __name__ == '__main__': 
  30          sys.path.insert(0, '../../') 
  31  from Gnumed.pycommon import gmLoginInfo 
  32  from Gnumed.pycommon import gmExceptions 
  33  from Gnumed.pycommon import gmDateTime 
  34  from Gnumed.pycommon import gmBorg 
  35  from Gnumed.pycommon import gmI18N 
  36  from Gnumed.pycommon import gmLog2 
  37  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  38   
  39  _log = logging.getLogger('gm.db') 
  40   
  41   
  42  # 3rd party 
  43  try: 
  44          import psycopg2 as dbapi 
  45  except ImportError: 
  46          _log.exception("Python database adapter psycopg2 not found.") 
  47          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  48          raise 
  49   
  50   
# sanity-check the psycopg2 adapter we just imported:
# GNUmed relies on a minimum feature set and refuses to start without it
_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
# DB-API 2.0 semantics are assumed throughout this module
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
# connections are shared across threads, so some thread safety is required
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
# all queries in this module use %(name)s placeholders
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
# psycopg2 encodes its compiled-in capabilities into the version string,
# e.g. "2.0.6 (dt dec ext pq3)"; .index() raises ValueError when missing
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')
  71   
  72  import psycopg2.extras 
  73  import psycopg2.extensions 
  74  import psycopg2.pool 
  75  import psycopg2.errorcodes as sql_error_codes 
  76   
  77  # ======================================================================= 
# client (wire) encoding used when none was configured explicitly
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None			# default time zone for connections
_sql_set_timezone = None				# SQL template used to apply the time zone on new connections
_timestamp_template = "cast('%s' as timestamp with time zone)"		# MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

# default connection parameters, populated lazily via set_default_login()
_default_dsn = None
_default_login = None

postgresql_version_string = None
postgresql_version = None			# accuracy: major.minor

# shared pool of read-only connections
__ro_conn_pool = None

# whether request_login_params() may interactively ask the user
auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

# expected md5 hash of the schema structure, per database version;
# computed server-side by gm.concat_table_structure()
known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',	# ... old (v1) style hashes
	6: '90e2026ac2efd236da9c8608b8685b2d',	# new (v2) style hashes ...
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161',
	17: '161428ee97a00e3bf56168c3a15b7b50'
}

# reverse mapping of the above (minus the version-0 marker)
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16,
	'161428ee97a00e3bf56168c3a15b7b50': 17
}

# minimum database version required by each client release branch
map_client_branch2required_db_version = {
	u'GIT tree': 0,
	u'0.3': 9,
	u'0.4': 10,
	u'0.5': 11,
	u'0.6': 12,
	u'0.7': 13,
	u'0.8': 14,
	u'0.9': 15,
	u'1.0': 16,		# intentional duplicate with 1.1
	u'1.1': 16,
	u'1.2': 17,
	u'1.3': 18
}

# get columns and data types for a given table
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# get column names only, in ordinal order
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
 177   
 178  # ======================================================================= 
 179  # module globals API 
 180  # ======================================================================= 
def set_default_client_encoding(encoding = None):
	"""Set the default client (wire) encoding for future connections.

	Validates that both psycopg2 and the Python codecs module can
	handle <encoding> before committing to it.

	Raises ValueError if psycopg2 does not know the encoding,
	LookupError if Python cannot handle psycopg2's mapping of it.
	"""
	# check whether psycopg2 can handle this encoding
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# check whether Python can handle this encoding
	py_enc = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
#---------------------------------------------------
def set_default_client_timezone(timezone = None):
	"""Set the default time zone applied to future connections.

	NOTE: unlike __detect_client_timezone() this does NOT validate
	<timezone> against the server.
	"""
	# FIXME: use __validate
	global _default_client_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone

	global _sql_set_timezone
	_sql_set_timezone = u'set timezone to %s'

	return True
#---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
	"""Check whether <timezone> is both settable and usable on <conn>.

	Runs inside a throwaway transaction (commit before, rollback
	after) so the connection state is left untouched.

	Returns True/False.
	"""
	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()		# discard the SET so later users see the old zone

	return is_valid
#---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations

	Looks up <timezone> as an abbreviation in pg_timezone_names and
	returns a matching full "Region/City" name, or <timezone>
	unchanged if no expansion is found (or the lookup fails).
	"""
	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
#---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection.

	Collects time zone candidates ($TZ, gmDateTime's detected name,
	plus their abbreviation expansions), validates them against the
	server and sets the module-level defaults
	(_default_client_timezone / _sql_set_timezone) accordingly.
	Falls back to a numeric ISO offset if no candidate validates.
	"""
	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	try:
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		# no $TZ in the environment - not an error
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		# fall back to a pure numeric offset - always settable
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
# =======================================================================
# login API
# =======================================================================
def __request_login_params_tui():
	"""Text mode request of database login parameters

	Returns a gmLoginInfo.LoginInfo instance.
	Raises gmExceptions.ConnectionError if the user cancels (Ctrl-C).
	"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = 'gnumed_v18')
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		# getpass so the password is not echoed to the terminal
		login.password = getpass.getpass(tmp)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
#---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object

	Raises gmExceptions.NoGuiError if no wx.App is running yet and
	gmExceptions.ConnectionError if the user cancelled the dialog.
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog
	# if wx was not initialized /no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	#if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
#---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection.

	Tries the wxPython GUI dialog first (only when $DISPLAY is set,
	since wxGTK crashes hard at the C level without an X display),
	falling back to the text mode dialog.

	Returns a gmLoginInfo.LoginInfo instance.
	Raises Exception when auto-requesting is disabled.
	"""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't wxGTK will crash hard at
	# C-level with "can't open Display")
	# ('DISPLAY' in ...) rather than the deprecated .has_key()
	if 'DISPLAY' in os.environ:
		# try wxPython GUI
		try:
			return __request_login_params_gui_wx()
		except Exception:
			# fall through to text mode; NOT a bare except so that
			# KeyboardInterrupt/SystemExit still propagate
			pass

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()
# =======================================================================
# DSN API
# -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a libpq/psycopg2 DSN string from individual parts.

	Parts which are None or blank are omitted; "sslmode=prefer" is
	always appended.
	"""
	def __has_content(part):
		# None or whitespace-only strings do not make it into the DSN
		return (part is not None) and (part.strip() != '')

	fragments = []
	if __has_content(database):
		fragments.append('dbname=%s' % database)
	if __has_content(host):
		fragments.append('host=%s' % host)
	# port may be an integer, hence str() before stripping
	if (port is not None) and (str(port).strip() != ''):
		fragments.append('port=%s' % port)
	if __has_content(user):
		fragments.append('user=%s' % user)
	if __has_content(password):
		fragments.append('password=%s' % password)
	fragments.append('sslmode=prefer')
	return ' '.join(fragments)
# ------------------------------------------------------
def get_default_login():
	"""Return the default login, requesting it from the user if needed."""
	# make sure we do have a login:
	# get_default_dsn() transparently requests login parameters
	# (and thereby populates _default_login) when necessary
	get_default_dsn()
	return _default_login
# ------------------------------------------------------
def get_default_dsn():
	"""Return the default DSN, interactively requesting login parameters if none set yet."""
	global _default_dsn
	if _default_dsn is not None:
		return _default_dsn

	# no DSN yet - ask for login parameters; set_default_login()
	# derives and stores the DSN as a side effect
	login = request_login_params()
	set_default_login(login=login)

	return _default_dsn
# ------------------------------------------------------
def set_default_login(login=None):
	"""Make <login> the module default and derive the default DSN from it.

	login: a gmLoginInfo.LoginInfo-like object; an empty host is
	normalized to None (= non-TCP/IP connection).

	Returns False if login is None, True otherwise.
	"""
	if login is None:
		return False

	# normalize empty host to None (non-TCP/IP connection)
	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	# FIX: log BEFORE overwriting _default_login - the original
	# assigned first and thus always logged the NEW value as "from"
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	# mask passwords before they hit the log
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info ('setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True
# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	"""Check whether the connected database schema hash matches <version>.

	version: key into known_schema_hashes
	verbose: on mismatch, additionally dump schema structure and
	revision history to the log

	Returns True/False.
	"""
	expected_hash = known_schema_hashes[version]
	# version 0 means "devel tree" - hashed with a magic marker version
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj=link_obj).split():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj=link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
#------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	"""Return the schema version number the database hash maps to.

	Returns a unicode error string (not an int) if the hash is unknown.
	"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	try:
		return map_schema_hash2version[rows[0]['md5']]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	"""Return the concatenated schema structure dump as one text blob."""
	cmd = u'select gm.concat_table_structure()'
	rows, _idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
	"""Return the md5 hash of the current database schema structure."""
	cmd = u'select md5(gm.concat_table_structure()) as md5'
	rows, _idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	"""Return rows of (imported::text, version, filename) from gm.schema_revision, oldest first."""
	cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
	return rows
#------------------------------------------------------------------------
def get_current_user():
	"""Return CURRENT_USER as seen by the database."""
	cmd = u'select CURRENT_USER'
	rows, _idx = run_ro_queries(queries = [{'cmd': cmd}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.

	Returns rows with referencing table/column information taken
	from pg_constraint.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
#------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>.

	Uses pg_inherits, so only direct inheritance children are found.
	Returns rows of (namespace, table).
	"""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows
#------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	"""True if a schema named <schema> exists in the connected database."""
	args = {'schema': schema}
	cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	rows, _idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': args}]
	)
	return rows[0][0]
#------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns false, true.

	True when <schema>.<table> exists as a BASE TABLE.
	"""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	args = (schema, table)
	rows, _idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_col_indices(cursor = None):
	"""Map the column names of <cursor>'s last result to positional indices.

	Returns None if the cursor carries no result description (unused
	cursor or last query did not select rows). Duplicate column names
	(as from "select 1,2;") are disambiguated by appending the column
	index, but dict-style row access won't work for such results.
	"""
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	# enumerate() instead of a manual counter;
	# "in" instead of the deprecated dict.has_key()
	for col_index, col_desc in enumerate(cursor.description):
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
	return col_indices
#------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [ [column names in ordinal order], {column name: type string} ]."""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# map array types: udt_name "_foo" denotes "foo[]"
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	col_defs = []
	col_defs.append(col_names)
	col_defs.append(col_type)
	return col_defs
#------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table

	Column names come back in ordinal position order.
	"""
	rows, _idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	return [ row[0] for row in rows ]

#------------------------------------------------------------------------
# i18n functions
#------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	"""Dump all database string translations into <filename> as a psql script.

	The emitted file re-imports via i18n.upd_tx() and is meant to be
	mailed to the GNUmed developers.
	"""
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off;\n\n')
	tx_file.write(u"set client_encoding to 'utf-8';\n\n")
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		# escape single quotes for the E'...' literals in the script
		line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	tx_file.write(u'\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
#------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	"""Remove one (language, original) translation row from i18n.translations."""
	args = {'lang': language, 'orig': original}
	cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
	run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
	return True

#------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	"""Insert or update one translation via i18n.upd_tx(); returns the args dict."""
	args = {'lang': language, 'orig': original, 'trans': translation}
	cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
	return args

#------------------------------------------------------------------------
def get_translation_languages():
	"""List the distinct languages for which database translations exist."""
	cmd = u'select distinct lang from i18n.translations'
	rows, _idx = run_ro_queries(queries = [{'cmd': cmd}])
	languages = []
	for row in rows:
		languages.append(row[0])
	return languages

#------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
	"""Retrieve translatable strings (and translations) from the database.

	language = None: return each translation key once per existing
	language (keys without any translation get lang/trans = NULL);
	otherwise return keys plus their translation into <language>.

	order_by = None: order by (lang, orig); otherwise by the given
	column list.
	"""
	args = {'lang': language}
	_log.debug('language [%s]', language)

	# FIX: this condition was inverted - passing order_by=None used to
	# build "ORDER BY None" and any explicit order_by was ignored
	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows

#------------------------------------------------------------------------
def get_current_user_language():
	"""Return the i18n language currently configured for the database user."""
	rows, _idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
	return rows[0][0]

#------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset

	Returns the boolean result of the i18n.(un)set_curr_lang() call;
	False typically means no translation exists for <language>.
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		# unset_curr_lang() returns void, so provide a True result row
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
#------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available.
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)

	args = {'lang': language}
	cmd = u'select i18n.force_curr_lang(%(lang)s)'
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

# =======================================================================
# query runners and helpers
# =======================================================================
def send_maintenance_notification():
	"""Broadcast an impending-maintenance warning to all listeners via NOTIFY."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
		return_data = False
	)
#------------------------------------------------------------------------
def send_maintenance_shutdown():
	"""Ask all connected clients to disconnect for maintenance via NOTIFY."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
		return_data = False
	)
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	"""Check whether <candidate> can be cast to a PostgreSQL interval.

	Tries a direct ::interval cast first, then via ::text (needed for
	non-string candidates such as numbers). Returns True/False.
	"""
	cmd = u'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except:
		# direct cast failed - retry with an intermediate ::text cast
		cmd = u'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except:
			return False
#------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store bytea data retrieved by <data_query> into <filename>.

	Thin wrapper around bytea2file_object(); see there for the
	meaning of the parameters. Returns its result.
	"""
	# open() instead of the deprecated file() builtin; the with-block
	# guarantees the file is closed even if the transfer raises
	# (the original leaked the handle on error)
	with open(filename, 'wb') as outfile:
		result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	return result
#------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file>
	- must be a file like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None

	Returns True once all data (possibly none) has been written.
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	# NOTE(review): despite the comment above the connection is
	# requested with readonly=True - presumably fine because raw
	# connections never get a client encoding set; confirm.
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	# NULL or zero-length bytea: nothing to transfer
	if data_size in [None, 0]:
		conn.rollback()
		return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk size of 0 means "retrieve whole field at once"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		# substring() is 1-based, hence the +1
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# read-only work only - rollback merely ends the transaction
	conn.rollback()
	return True
956 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea

	<args> - if given, this dict is modified in place: the
	         key 'data' is set to the file content
	<conn> - use this connection, else acquire (and close) a
	         private read-write one
	"""
	# read data from file
	infile = open(filename, "rb")
	try:
		data_as_byte_string = infile.read()
	finally:
		infile.close()
	if args is None:
		args = {}
	# wrap in buffer() so psycopg2 transmits it as binary (bytea),
	# not as text
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	try:
		run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
	finally:
		# close the private connection even on query failure -
		# run_rw_queries() does not close connections it was handed
		if close_conn:
			conn.close()

	return
988 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: try to escape *all* metacharacters
		False: only escape those which render the regex invalid

	(The <escape_all> mode used to be documented but not
	implemented - it behaved exactly like False.)
	"""
	if escape_all:
		# escape the backslash FIRST so escapes inserted
		# below are not escaped a second time
		for metachar in '\\()[]{}+.*?|^$':
			expression = expression.replace(metachar, '\\' + metachar)
		return expression

	# only escape metacharacters which can render the regex
	# invalid: unbalanced parens/brackets, dangling quantifiers
	return expression.replace (
		'(', '\\('
	).replace (
		')', '\\)'
	).replace (
		'[', '\\['
	).replace (
		'+', '\\+'
	).replace (
		'.', '\\.'
	).replace (
		'*', '\\*'
	)
1013 #']', '\]', # not needed 1014 #------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<link_obj>
	- a cursor, a connection, or None (in which case a pooled
	  read-only connection is acquired)

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]

	Returns a tuple (data, col_idx):
	- data: fetchall() result of the *last* query if
	  <return_data> is true, else None
	- col_idx: field-name -> column-position mapping if
	  <get_col_idx> is true, else None
	"""
	# bind cleanup callables depending on what we were handed:
	# a cursor/connection owned by the caller must not be
	# closed/rolled back here, hence the __noop placeholders
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				if curs.statusmessage != u'':
					_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except dbapi.Error as pg_exc:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			# map insufficient-privilege errors onto a GNUmed-level exception
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				raise gmExceptions.AccessDenied (
					u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n')),
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			raise

	data = None
	col_idx = None
	if return_data:
		# only the result of the LAST query is returned
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
1108 #------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	# bind cleanup/finalization callables depending on what we
	# were handed - objects owned by the caller are neither
	# closed nor committed here (__noop placeholders)
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		tx_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			tx_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			tx_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		tx_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except dbapi.Error as pg_exc:
			_log.error('RW query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
				tx_rollback()		# just for good measure
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			# map insufficient-privilege errors onto a GNUmed-level exception
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				raise gmExceptions.AccessDenied (
					u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n')),
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				# NOTE(review): this bare raise re-raises the
				# InterfaceError, thereby masking the original
				# query error - confirm that is intended
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				# NOTE(review): masks the original fetch error,
				# see note above
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
1253 #------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generate and run an INSERT statement.

	<schema> - target schema, defaults to 'public'
	<table> - target table name
	<values> - dict of values keyed by the field to insert them into
	<returning> - list of field names for a RETURNING clause, or None

	Returns the (data, idx) tuple from run_rw_queries().
	"""
	if schema is None:
		schema = u'public'

	# walk the keys exactly once so that the column list and the
	# placeholder list are guaranteed to line up
	fields = values.keys()
	val_snippets = [u'%%(%s)s' % field for field in fields]

	if returning is None:
		returning_clause = u''
		return_data = False
	else:
		returning_clause = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
		return_data = True

	cmd = u"""\nINSERT INTO %s.%s (\n\t\t%s\n) VALUES (\n\t\t%s\n)%s""" % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning_clause
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)
1296 # ======================================================================= 1297 # connection handling API 1298 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""
	GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):
		# open a read-only connection for the DSN this pool was
		# created with; <key> is the pool's per-thread key
		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

		# guard against consumers closing shared pool connections:
		# keep the real close() reachable under .original_close ...
		conn.original_close = conn.close
		# ... and make .close() raise instead (shutdown() bypasses this)
		conn.close = _raise_exception_on_ro_conn_close

		if key is not None:
			# bookkeeping structures inherited from the base pool class
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		# really close all handed-out connections, using the
		# remembered original close() rather than the patched one
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
1327 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc

	<readonly> controls the session-level access mode.

	The returned connection carries .is_decorated = False so
	get_connection() knows it still needs full setup.
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	# catch config files still pointing at the retired public server
	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		# heuristics for telling authentication failures from
		# other connect errors (message text varies by PG version
		# and localization, hence several patterns)
		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		# extract "major.minor" from server_version as a numeric
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			# best effort only - size logging must never block connecting
			pass
		if verbose:
			__log_PG_settings(curs=curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	# not yet decorated with encoding/timezone/isolation level -
	# get_connection() will finish the job
	conn.is_decorated = False

	return conn
1417 # =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.

	Pooled read-only default-DSN connections come from the
	shared pool; everything else gets a fresh raw connection
	which is then decorated (encoding, isolation level,
	time zone, bytea_output) exactly once.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		# NOTE(review): readonly is hardcoded to False here even
		# when the caller asked for readonly=True with pooled=False
		# (or a non-default DSN) - confirm this is intentional
		conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

	# pooled connections may already be fully set up
	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	# read-only connections keep the default level, only RW
	# connections are raised to SERIALIZABLE
	if readonly:
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	# mark as fully set up so pooled reuse skips decoration
	conn.is_decorated = True

	return conn
1499 #-----------------------------------------------------------------------
def shutdown():
	"""Tear down the read-only connection pool, if one was ever created."""
	if __ro_conn_pool is not None:
		__ro_conn_pool.shutdown()
1504 # ====================================================================== 1505 # internal helpers 1506 #-----------------------------------------------------------------------
def __noop():
	"""Do-nothing placeholder bound in place of close/commit/rollback
	callables when the caller owns the cursor/connection."""
	pass
1509 #-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
	"""Monkey-patched in as .close() on pooled read-only connections.

	Prevents consumers from closing shared pool connections -
	the pool itself closes them via .original_close().
	"""
	raise TypeError(u'close() called on read-only connection')
1512 #-----------------------------------------------------------------------
def log_database_access(action=None):
	"""Write an audit trail entry for <action> into gm.access_log."""
	entry = {u'user_action': action}
	run_insert (
		schema = u'gm',
		table = u'access_log',
		values = entry,
		end_tx = True
	)
1520 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
	"""Check server time and local time to be within
	the given tolerance of each other.

	tolerance: seconds

	Returns False when the wire roundtrip alone exceeds the
	tolerance (measurement useless) or when the skew exceeds
	it, True otherwise.
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	cmd = u"SELECT now() at time zone 'UTC'"
	conn = get_raw_connection(readonly=True)
	curs = conn.cursor()

	# bracket the query with local timestamps so the wire
	# roundtrip can be measured
	start = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
	end = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	# "now() at time zone 'UTC'" yields a timestamp WITHOUT time
	# zone, so this should be a naive datetime comparable to the
	# naive utcnow() above
	server_now_as_utc = rows[0][0]
	query_duration = end - start
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	# absolute skew, regardless of which side is ahead
	if server_now_as_utc > client_now_as_utc:
		real_skew = server_now_as_utc - client_now_as_utc
	else:
		real_skew = client_now_as_utc - server_now_as_utc

	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True
1563 #-----------------------------------------------------------------------
def sanity_check_database_settings():
	"""Checks database settings.

	returns (status, message)
	status:
		0: no problem
		1: non-fatal problem
		2: fatal problem
	"""
	_log.debug('checking database settings')

	conn = get_connection()

	# - version string
	global postgresql_version_string
	if postgresql_version_string is None:
		curs = conn.cursor()
		curs.execute('SELECT version()')
		postgresql_version_string = curs.fetchone()['version']
		curs.close()
		_log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

	options2check = {
		# setting: [expected value, risk, fatal?]
		# fatal? semantics: True = fatal, False = non-fatal,
		# None = log/report only (see loop below)
		u'allow_system_table_mods': [u'off', u'system breakage', False],
		u'check_function_bodies': [u'on', u'suboptimal error detection', False],
		u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
		u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
		u'default_transaction_read_only': [u'on', u'accidental database writes', False],
		u'fsync': [u'on', u'data loss/corruption', True],
		u'full_page_writes': [u'on', u'data loss/corruption', False],
		u'lc_messages': [u'C', u'suboptimal error detection', False],
		u'password_encryption': [u'on', u'breach of confidentiality', False],
		u'regex_flavor': [u'advanced', u'query breakage', False],	# 9.0 doesn't support this anymore, default now advanced anyway
		u'synchronous_commit': [u'on', u'data loss/corruption', False],
		u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
	}

	# connection/disconnection logging is only FATAL when HIPAA
	# compliance was requested via configuration
	from Gnumed.pycommon import gmCfg2
	_cfg = gmCfg2.gmCfgData()
	if _cfg.get(option = u'hipaa'):
		options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
		options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
	else:
		options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
		options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

	cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
	rows, idx = run_ro_queries (
		link_obj = conn,
		queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
		get_col_idx = False
	)

	found_error = False
	found_problem = False
	msg = []
	for row in rows:
		option = row['name']
		value_found = row['setting']
		value_expected = options2check[option][0]
		risk = options2check[option][1]
		fatal_setting = options2check[option][2]
		if value_found != value_expected:
			if fatal_setting is True:
				found_error = True
			elif fatal_setting is False:
				found_problem = True
			elif fatal_setting is None:
				# report-only option: mentioned in the message
				# but does not change the status
				pass
			else:
				_log.error(options2check[option])
				raise ValueError(u'invalid database configuration sanity check')
			msg.append(_(' option [%s]: %s') % (option, value_found))
			msg.append(_(' risk: %s') % risk)
			_log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

	if found_error:
		return 2, u'\n'.join(msg)

	if found_problem:
		return 1, u'\n'.join(msg)

	return 0, u''
1648 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
	"""Log all PostgreSQL runtime settings at DEBUG level.

	<curs> - an open database cursor

	Returns True on success, False otherwise.
	"""
	# don't use any of the run_*()s since that might
	# create a loop if we fail here
	# FIXME: use pg_settings
	try:
		curs.execute(u'show all')
	# narrowed from a bare except: still best-effort but no longer
	# swallows SystemExit/KeyboardInterrupt
	except Exception:
		_log.exception(u'cannot log PG settings (>>>show all<<< failed)')
		return False
	settings = curs.fetchall()
	if settings is None:
		_log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
		return False
	for setting in settings:
		_log.debug(u'PG option [%s]: %s', setting[0], setting[1])
	return True
1665 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
	"""Safely pull a unicode error message out of a PG exception.

	<exc> - the exception instance to inspect

	Returns a fallback text when <exc> carries no usable args.
	"""
	try:
		raw_msg = exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return u'cannot extract message from exception'

	# decode against the client encoding, replacing undecodable bytes
	# NOTE(review): assumes args[0] is a byte string - an already-
	# unicode args[0] would raise TypeError here
	return unicode(raw_msg, gmI18N.get_encoding(), 'replace')
1674 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
	"""Raised when connecting fails for what looks like authentication reasons.

	<dsn> - the DSN used in the failed connect attempt
	<prev_val> - the original PostgreSQL error message
	"""

	def __init__(self, dsn=None, prev_val=None):
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
		_log.error(tmp)
		# byte-string result encoded in the client encoding (Python 2 str())
		return tmp.encode(gmI18N.get_encoding(), 'replace')

	def __unicode__(self):
		return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1689 1690 # ======================================================================= 1691 # custom psycopg2 extensions 1692 # =======================================================================
class cEncodingError(dbapi.OperationalError):
	"""Raised when the requested client encoding cannot be set on a connection.

	<encoding> - the encoding PostgreSQL rejected
	<prev_val> - the original PostgreSQL error message
	"""

	def __init__(self, encoding=None, prev_val=None):
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		# byte-string result encoded in the client encoding (Python 2 str())
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

	def __unicode__(self):
		return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1705 1706 # ----------------------------------------------------------------------- 1707 # Python -> PostgreSQL 1708 # ----------------------------------------------------------------------- 1709 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
class cAdapterPyDateTime(object):
	"""psycopg2 adapter turning timezone-aware datetime.datetime
	instances into SQL timestamp literals (via _timestamp_template)."""

	def __init__(self, dt):
		# refuse naive datetimes - they must not reach the backend
		if dt.tzinfo is None:
			# chained interpolation (left-assoc %): first splice
			# _timestamp_template into the message's %s, then fill
			# the template's own %s with the ISO string
			raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		# wrap the ISO representation into a SQL timestamp literal
		return _timestamp_template % self.__dt.isoformat()

## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##	def __init__(self, dt):
##		if dt.tz == '???':
##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##		self.__dt = dt
##
##	def getquoted(self):
##		# under some locale settings the mx.DateTime ISO formatter
##		# will insert "," into the ISO string,
##		# while this is allowed per the ISO8601 spec PostgreSQL
##		# cannot currently handle that,
##		# so map those "," to "." to make things work:
##		return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# module-level side effects: the following registrations run at import time

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2 becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#	import mx.DateTime as mxDT
##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

	# self-test harness: only runs when invoked as "... gmPG2.py test"
	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#---------------------------------------------------------------------
def test_file2bytea():
    """Roundtrip the file named in sys.argv[2] into a scratch bytea column.

    Creates a throwaway table, INSERTs the file content via file2bytea(),
    then drops the table again (cleanup runs even if the insert failed).
    """
    run_rw_queries(queries = [
        {'cmd': u'create table test_bytea (data bytea)'}
    ])

    cmd = u'insert into test_bytea values (%(data)s::bytea)'
    try:
        file2bytea(query = cmd, filename = sys.argv[2])
    except Exception:
        # was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit -- narrowed to Exception;
        # best-effort logging is intentional test scaffolding
        _log.exception('error')

    # clean up the scratch table regardless of insert outcome
    run_rw_queries(queries = [
        {'cmd': u'drop table test_bytea'}
    ])
1790 #--------------------------------------------------------------------
1791 - def test_get_connection():
1792 print "testing get_connection()" 1793 1794 dsn = 'foo' 1795 try: 1796 conn = get_connection(dsn=dsn) 1797 except dbapi.OperationalError, e: 1798 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1799 t, v = sys.exc_info()[:2] 1800 print ' ', t 1801 print ' ', v 1802 1803 dsn = 'dbname=gnumed_v9' 1804 try: 1805 conn = get_connection(dsn=dsn) 1806 except cAuthenticationError: 1807 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1808 t, v = sys.exc_info()[:2] 1809 print ' ', t 1810 print ' ', v 1811 1812 dsn = 'dbname=gnumed_v9 user=abc' 1813 try: 1814 conn = get_connection(dsn=dsn) 1815 except cAuthenticationError: 1816 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1817 t, v = sys.exc_info()[:2] 1818 print ' ', t 1819 print ' ', v 1820 1821 dsn = 'dbname=gnumed_v9 user=any-doc' 1822 try: 1823 conn = get_connection(dsn=dsn) 1824 except cAuthenticationError: 1825 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1826 t, v = sys.exc_info()[:2] 1827 print ' ', t 1828 print ' ', v 1829 1830 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1831 try: 1832 conn = get_connection(dsn=dsn) 1833 except cAuthenticationError: 1834 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1835 t, v = sys.exc_info()[:2] 1836 print ' ', t 1837 print ' ', v 1838 1839 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1840 conn = get_connection(dsn=dsn, readonly=True) 1841 1842 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1843 conn = get_connection(dsn=dsn, readonly=False) 1844 1845 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1846 encoding = 'foo' 1847 try: 1848 conn = get_connection(dsn=dsn, encoding=encoding) 1849 except cEncodingError: 1850 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1851 t, v = sys.exc_info()[:2] 1852 print ' ', t 1853 print ' ', v
1854 #--------------------------------------------------------------------
1855 - def test_exceptions():
1856 print "testing exceptions" 1857 1858 try: 1859 raise cAuthenticationError('no dsn', 'no previous exception') 1860 except cAuthenticationError: 1861 t, v, tb = sys.exc_info() 1862 print t 1863 print v 1864 print tb 1865 1866 try: 1867 raise cEncodingError('no dsn', 'no previous exception') 1868 except cEncodingError: 1869 t, v, tb = sys.exc_info() 1870 print t 1871 print v 1872 print tb
1873 #--------------------------------------------------------------------
1874 - def test_ro_queries():
1875 print "testing run_ro_queries()" 1876 1877 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1878 conn = get_connection(dsn, readonly=True) 1879 1880 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1881 print data 1882 print idx 1883 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 1884 print data 1885 print idx 1886 1887 curs = conn.cursor() 1888 1889 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1890 print data 1891 print idx 1892 1893 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 1894 print data 1895 print idx 1896 1897 try: 1898 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 1899 print data 1900 print idx 1901 except psycopg2.ProgrammingError: 1902 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 1903 t, v = sys.exc_info()[:2] 1904 print ' ', t 1905 print ' ', v 1906 1907 curs.close()
1908 #--------------------------------------------------------------------
def test_request_dsn():
	# no DSN argument: get_connection() presumably falls back to
	# requesting/deriving connection parameters itself -- confirm
	# against its definition elsewhere in this file
	conn = get_connection()
	print conn
	conn.close()
1913 #--------------------------------------------------------------------
1914 - def test_set_encoding():
1915 print "testing set_default_client_encoding()" 1916 1917 enc = 'foo' 1918 try: 1919 set_default_client_encoding(enc) 1920 print "SUCCESS: encoding [%s] worked" % enc 1921 except ValueError: 1922 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1923 t, v = sys.exc_info()[:2] 1924 print ' ', t 1925 print ' ', v 1926 1927 enc = '' 1928 try: 1929 set_default_client_encoding(enc) 1930 print "SUCCESS: encoding [%s] worked" % enc 1931 except ValueError: 1932 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1933 t, v = sys.exc_info()[:2] 1934 print ' ', t 1935 print ' ', v 1936 1937 enc = 'latin1' 1938 try: 1939 set_default_client_encoding(enc) 1940 print "SUCCESS: encoding [%s] worked" % enc 1941 except ValueError: 1942 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1943 t, v = sys.exc_info()[:2] 1944 print ' ', t 1945 print ' ', v 1946 1947 enc = 'utf8' 1948 try: 1949 set_default_client_encoding(enc) 1950 print "SUCCESS: encoding [%s] worked" % enc 1951 except ValueError: 1952 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1953 t, v = sys.exc_info()[:2] 1954 print ' ', t 1955 print ' ', v 1956 1957 enc = 'unicode' 1958 try: 1959 set_default_client_encoding(enc) 1960 print "SUCCESS: encoding [%s] worked" % enc 1961 except ValueError: 1962 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1963 t, v = sys.exc_info()[:2] 1964 print ' ', t 1965 print ' ', v 1966 1967 enc = 'UNICODE' 1968 try: 1969 set_default_client_encoding(enc) 1970 print "SUCCESS: encoding [%s] worked" % enc 1971 except ValueError: 1972 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1973 t, v = sys.exc_info()[:2] 1974 print ' ', t 1975 print ' ', v
1976 #--------------------------------------------------------------------
def test_connection_pool():
	# NOTE(review): dsn is fetched but the pool is created with dsn=None --
	# presumably cConnectionPool falls back to a default DSN; confirm
	dsn = get_default_dsn()
	pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False)
	print pool
	# pool is capped at maxconn=2 yet getconn() is called four times --
	# apparently probing pool re-use/exhaustion behaviour; confirm
	print pool.getconn()
	print pool.getconn()
	print pool.getconn()
	print type(pool.getconn())
1985 #--------------------------------------------------------------------
def test_list_args():
    """Verify psycopg2 accepts a list as the query-parameter container."""
    conn = get_connection(get_default_dsn(), readonly=True)
    cursor = conn.cursor()
    cursor.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
1991 #--------------------------------------------------------------------
1992 - def test_sanitize_pg_regex():
1993 tests = [ 1994 ['(', '\\('] 1995 , ['[', '\\['] 1996 , [')', '\\)'] 1997 ] 1998 for test in tests: 1999 result = sanitize_pg_regex(test[0]) 2000 if result != test[1]: 2001 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2002 #--------------------------------------------------------------------
2003 - def test_is_pg_interval():
2004 status = True 2005 tests = [ 2006 [None, True], # None == NULL == succeeds ! 2007 [1, True], 2008 ['1', True], 2009 ['abc', False] 2010 ] 2011 2012 if not is_pg_interval(): 2013 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2014 status = False 2015 2016 for test in tests: 2017 result = is_pg_interval(test[0]) 2018 if result != test[1]: 2019 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2020 status = False 2021 2022 return status
2023 #--------------------------------------------------------------------
def test_sanity_check_time_skew():
	# thin wrapper: just invoke the client/server clock-skew check
	sanity_check_time_skew()
2026 #--------------------------------------------------------------------
2027 - def test_get_foreign_key_details():
2028 for row in get_foreign_keys2column ( 2029 schema = u'dem', 2030 table = u'identity', 2031 column = u'pk' 2032 ): 2033 print '%s.%s references %s.%s.%s' % ( 2034 row['referencing_table'], 2035 row['referencing_column'], 2036 row['referenced_schema'], 2037 row['referenced_table'], 2038 row['referenced_column'] 2039 )
2040 #--------------------------------------------------------------------
2041 - def test_set_user_language():
2042 # (user, language, result, exception type) 2043 tests = [ 2044 # current user 2045 [None, 'de_DE', True], 2046 [None, 'lang_w/o_tx', False], 2047 [None, None, True], 2048 # valid user 2049 ['any-doc', 'de_DE', True], 2050 ['any-doc', 'lang_w/o_tx', False], 2051 ['any-doc', None, True], 2052 # invalid user 2053 ['invalid user', 'de_DE', None], 2054 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 2055 ['invalid user', None, True] 2056 ] 2057 for test in tests: 2058 try: 2059 result = set_user_language(user = test[0], language = test[1]) 2060 if result != test[2]: 2061 print "test:", test 2062 print "result:", result, "expected:", test[2] 2063 except psycopg2.IntegrityError, e: 2064 if test[2] is None: 2065 continue 2066 print "test:", test 2067 print "expected exception" 2068 print "result:", e
2069 #--------------------------------------------------------------------
2070 - def test_get_schema_revision_history():
2071 for line in get_schema_revision_history(): 2072 print u' - '.join(line)
2073 #--------------------------------------------------------------------
2074 - def test_run_query():
2075 gmDateTime.init() 2076 args = {'dt': gmDateTime.pydt_max_here()} 2077 cmd = u"SELECT %(dt)s" 2078 2079 #cmd = u"SELECT 'infinity'::timestamp with time zone" 2080 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2081 print rows
2082 #--------------------------------------------------------------------
def test_schema_exists():
	# no argument: schema_exists() presumably checks a default schema --
	# confirm against its definition elsewhere in this file
	print schema_exists()
2085 2086 #-------------------------------------------------------------------- 2087 # run tests 2088 #test_file2bytea() 2089 #test_get_connection() 2090 #test_exceptions() 2091 #test_ro_queries() 2092 #test_request_dsn() 2093 #test_set_encoding() 2094 #test_connection_pool() 2095 #test_list_args() 2096 #test_sanitize_pg_regex() 2097 #test_is_pg_interval() 2098 #test_sanity_check_time_skew() 2099 #test_get_foreign_key_details() 2100 #test_set_user_language() 2101 #test_get_schema_revision_history() 2102 #test_run_query() 2103 test_schema_exists() 2104 2105 # ====================================================================== 2106