1 """GNUmed PostgreSQL connection handling.
2
3 TODO: iterator/generator batch fetching:
4 - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
5 - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4"
6
7 winner:
8 def resultset_functional_batchgenerator(cursor, size=100):
9 for results in iter(lambda: cursor.fetchmany(size), []):
10 for rec in results:
11 yield rec
12 """
13
14
15 __version__ = "$Revision: 1.127 $"
16 __author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
17 __license__ = 'GPL (details at http://www.gnu.org)'
18
19
20
21 import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging, locale
22
23
24 if __name__ == '__main__':
25 sys.path.insert(0, '../../')
26 from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2
27
28 _log = logging.getLogger('gm.db')
29 _log.info(__version__)
30
31
32 try:
33 import psycopg2 as dbapi
34 except ImportError:
35 _log.exception("Python database adapter psycopg2 not found.")
36 print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
37 raise
38
39
40
41 _log.info('psycopg2 version: %s' % dbapi.__version__)
42 _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
43 if not (float(dbapi.apilevel) >= 2.0):
44 raise ImportError('gmPG2: supported DB-API level too low')
45 if not (dbapi.threadsafety > 0):
46 raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
47 if not (dbapi.paramstyle == 'pyformat'):
48 raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
49 try:
50 dbapi.__version__.index('dt')
51 except ValueError:
52 raise ImportError('gmPG2: lacking datetime support in psycopg2')
53 try:
54 dbapi.__version__.index('ext')
55 except ValueError:
56 raise ImportError('gmPG2: lacking extensions support in psycopg2')
57 try:
58 dbapi.__version__.index('pq3')
59 except ValueError:
60 raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')
61
62 import psycopg2.extras
63 import psycopg2.extensions
64 import psycopg2.pool
65 import psycopg2.errorcodes as sql_error_codes
66
67
68 _default_client_encoding = 'UTF8'
69 _log.info('assuming default client encoding of [%s]' % _default_client_encoding)
70
71
72 _default_client_timezone = None
73 _sql_set_timezone = None
74 _timestamp_template = "cast('%s' as timestamp with time zone)"
75 FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone
76
77 _default_dsn = None
78 _default_login = None
79
80 postgresql_version_string = None
81 postgresql_version = None
82
83 __ro_conn_pool = None
84
85
86
87
88
# MD5 hashes of the released database schemas, as computed by
# md5(gm.concat_table_structure()); used to verify that a connected
# database actually matches the schema version it claims to be
known_schema_hashes = {
	'devel': 'not released, testing only',
	'v2': 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	'v3': 'e73718eaf230d8f1d2d01afa8462e176',
	'v4': '4428ccf2e54c289136819e701bb095ea',
	'v5': '7e7b093af57aea48c288e76632a382e5',
	'v6': '90e2026ac2efd236da9c8608b8685b2d',
	'v7': '6c9f6d3981483f8e9433df99d1947b27',
	'v8': '89b13a7af83337c3aad153b717e52360',
	'v9': '641a9b2be3c378ffc2bb2f0b1c9f051d',
	'v10': '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	'v11': '03042ae24f3f92877d986fb0a6184d76',
	'v12': '06183a6616db62257e22814007a8ed07',
	'v13': 'fab7c1ae408a6530c47f9b5111a0841e'
}

# reverse lookup of the above: schema hash -> version tag
# (note: no entry for 'devel' since its hash is not fixed)
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 'v2',
	'e73718eaf230d8f1d2d01afa8462e176': 'v3',
	'4428ccf2e54c289136819e701bb095ea': 'v4',
	'7e7b093af57aea48c288e76632a382e5': 'v5',
	'90e2026ac2efd236da9c8608b8685b2d': 'v6',
	'6c9f6d3981483f8e9433df99d1947b27': 'v7',
	'89b13a7af83337c3aad153b717e52360': 'v8',
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 'v9',
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 'v10',
	'03042ae24f3f92877d986fb0a6184d76': 'v11',
	'06183a6616db62257e22814007a8ed07': 'v12',
	'fab7c1ae408a6530c47f9b5111a0841e': 'v13'
}

# which database schema version each client release branch requires
map_client_branch2required_db_version = {
	u'GIT tree': u'devel',
	u'0.3': u'v9',
	u'0.4': u'v10',
	u'0.5': u'v11',
	u'0.6': u'v12',
	u'0.7': u'v13'
}

# column names and type names of a table, in declaration order;
# args: (schema, table)
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# column names only, in declaration order; args: (schema, table)
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
152
153
154
155
157
158 if encoding not in psycopg2.extensions.encodings:
159 raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
160
161 py_enc = psycopg2.extensions.encodings[encoding]
162 try:
163 codecs.lookup(py_enc)
164 except LookupError:
165 _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
166 raise
167
168
169
170
171 global _default_client_encoding
172 _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
173 _default_client_encoding = encoding
174 return True
175
187
189
190 _log.debug(u'validating time zone [%s]', timezone)
191
192 cmd = u'set timezone to %(tz)s'
193 args = {u'tz': timezone}
194
195 conn.commit()
196 curs = conn.cursor()
197 is_valid = False
198 try:
199 curs.execute(cmd, args)
200 _log.info(u'time zone [%s] is settable', timezone)
201
202 cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
203 try:
204 curs.execute(cmd)
205 curs.fetchone()
206 _log.info(u'time zone [%s] is usable', timezone)
207 is_valid = True
208 except:
209 _log.error('error using time zone [%s]', timezone)
210 except dbapi.DataError:
211 _log.warning(u'time zone [%s] is not settable', timezone)
212 except:
213 _log.error(u'failed to set time zone to [%s]', timezone)
214 _log.exception(u'')
215
216 curs.close()
217 conn.rollback()
218
219 return is_valid
220
222 """some timezone defs are abbreviations so try to expand
223 them because "set time zone" doesn't take abbreviations"""
224
225 cmd = u"""
226 select distinct on (abbrev) name
227 from pg_timezone_names
228 where
229 abbrev = %(tz)s and
230 name ~ '^[^/]+/[^/]+$' and
231 name !~ '^Etc/'
232 """
233 args = {u'tz': timezone}
234
235 conn.commit()
236 curs = conn.cursor()
237
238 result = timezone
239 try:
240 curs.execute(cmd, args)
241 rows = curs.fetchall()
242 if len(rows) > 0:
243 result = rows[0][0]
244 _log.debug(u'[%s] maps to [%s]', timezone, result)
245 except:
246 _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)
247
248 curs.close()
249 conn.rollback()
250
251 return result
252
298
299
300
306
308 """Text mode request of database login parameters"""
309 import getpass
310 login = gmLoginInfo.LoginInfo()
311
312 print "\nPlease enter the required login parameters:"
313 try:
314 login.host = __prompted_input("host ['' = non-TCP/IP]: ", '')
315 login.database = __prompted_input("database [gnumed_v14]: ", 'gnumed_v14')
316 login.user = __prompted_input("user name: ", '')
317 tmp = 'password for "%s" (not shown): ' % login.user
318 login.password = getpass.getpass(tmp)
319 login.port = __prompted_input("port [5432]: ", 5432)
320 except KeyboardInterrupt:
321 _log.warning("user cancelled text mode login dialog")
322 print "user cancelled text mode login dialog"
323 raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))
324
325 return login
326
328 """GUI (wx) input request for database login parameters.
329
330 Returns gmLoginInfo.LoginInfo object
331 """
332 import wx
333
334
335 if wx.GetApp() is None:
336 raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))
337
338
339
340 import gmAuthWidgets
341 dlg = gmAuthWidgets.cLoginDialog(None, -1)
342 dlg.ShowModal()
343 login = dlg.panel.GetLoginInfo()
344 dlg.Destroy()
345
346
347 if login is None:
348 raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))
349
350 return login
351
353 """Request login parameters for database connection.
354 """
355
356
357
358 if os.environ.has_key('DISPLAY'):
359
360 try:
361 return __request_login_params_gui_wx()
362 except:
363 pass
364
365
366 return __request_login_params_tui()
367
368
369
370
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None, sslmode='prefer'):
	"""Assemble a psycopg2/libpq DSN string from individual parts.

	Parts which are None or empty/whitespace-only are omitted so
	that libpq falls back to its built-in defaults (UNIX domain
	socket, default port, ...).

	database -- database name (dbname= part)
	host -- host name or IP, None/'' means non-TCP/IP connection
	port -- TCP port, int or string
	user -- database user name
	password -- password for <user>
	sslmode -- libpq sslmode, default 'prefer' (the previously
		hard-coded value, so existing callers are unaffected)

	Returns a space-separated DSN string for psycopg2.connect().
	"""
	candidates = [
		('dbname', database),
		('host', host),
		('port', port),
		('user', user),
		('password', password)
	]
	# skip missing/blank parts so libpq applies its defaults
	dsn_parts = [
		'%s=%s' % (key, val)
		for key, val in candidates
		if (val is not None) and (str(val).strip() != '')
	]
	dsn_parts.append('sslmode=%s' % sslmode)

	return ' '.join(dsn_parts)
392
397
407
427
428
429
431 expected_hash = known_schema_hashes[version]
432 if version == 'devel':
433 args = {'ver': '9999'}
434 else:
435 args = {'ver': version.strip('v')}
436 rows, idx = run_ro_queries (
437 link_obj = link_obj,
438 queries = [{
439 'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
440 'args': args
441 }]
442 )
443 if rows[0]['md5'] != expected_hash:
444 _log.error('database schema version mismatch')
445 _log.error('expected: %s (%s)' % (version, expected_hash))
446 _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
447 if verbose:
448 _log.debug('schema dump follows:')
449 for line in get_schema_structure(link_obj=link_obj).split():
450 _log.debug(line)
451 _log.debug('schema revision history dump follows:')
452 for line in get_schema_revision_history(link_obj=link_obj):
453 _log.debug(u' - '.join(line))
454 return False
455 _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
456 return True
457
459 rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
460 try:
461 return map_schema_hash2version[rows[0]['md5']]
462 except KeyError:
463 return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
464
466 rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
467 return rows[0][0]
468
470 cmd = u"""
471 select
472 imported::text,
473 version,
474 filename
475 from gm.schema_revision
476 order by imported
477 """
478 rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
479 return rows
480
482 rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
483 return rows[0][0]
484
486 """Get the foreign keys pointing to schema.table.column.
487
488 Does not properly work with multi-column FKs.
489 GNUmed doesn't use any, however.
490 """
491 cmd = u"""
492 select
493 %(schema)s as referenced_schema,
494 %(tbl)s as referenced_table,
495 %(col)s as referenced_column,
496 pgc.confkey as referenced_column_list,
497 pgc.conrelid::regclass as referencing_table,
498 pgc.conkey as referencing_column_list,
499 (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
500 from
501 pg_constraint pgc
502 where
503 pgc.contype = 'f'
504 and
505 pgc.confrelid = (
506 select oid from pg_class where relname = %(tbl)s and relnamespace = (
507 select oid from pg_namespace where nspname = %(schema)s
508 )
509 ) and
510 (
511 select attnum
512 from pg_attribute
513 where
514 attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
515 select oid from pg_namespace where nspname = %(schema)s
516 ))
517 and
518 attname = %(col)s
519 ) = any(pgc.confkey)
520 """
521
522 args = {
523 'schema': schema,
524 'tbl': table,
525 'col': column
526 }
527
528 rows, idx = run_ro_queries (
529 link_obj = link_obj,
530 queries = [
531 {'cmd': cmd, 'args': args}
532 ]
533 )
534
535 return rows
536
538 """Return child tables of <table>."""
539 cmd = u"""
540 select
541 pgn.nspname as namespace,
542 pgc.relname as table
543 from
544 pg_namespace pgn,
545 pg_class pgc
546 where
547 pgc.relnamespace = pgn.oid
548 and
549 pgc.oid in (
550 select inhrelid from pg_inherits where inhparent = (
551 select oid from pg_class where
552 relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
553 relname = %(table)s
554 )
555 )"""
556 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
557 return rows
558
560 """Returns false, true."""
561 cmd = u"""
562 select exists (
563 select 1 from information_schema.tables
564 where
565 table_schema = %s and
566 table_name = %s and
567 table_type = 'BASE TABLE'
568 )"""
569 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
570 return rows[0][0]
571
573 if cursor.description is None:
574 _log.error('no result description available: unused cursor or last query did not select rows')
575 return None
576 col_indices = {}
577 col_index = 0
578 for col_desc in cursor.description:
579 col_name = col_desc[0]
580
581
582
583 if col_indices.has_key(col_name):
584 col_name = '%s_%s' % (col_name, col_index)
585 col_indices[col_name] = col_index
586 col_index += 1
587
588 return col_indices
589
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return column definitions of <schema>.<table>.

	Returns a 2-element list:
		[0] -- list of column names, in declaration order
		[1] -- dict mapping column name -> type name

	PostgreSQL reports array types with a leading underscore,
	these are translated into "<type>[]" notation.
	"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	names = []
	types = {}
	for row in rows:
		name = row[0]
		udt = row[1]
		names.append(name)
		# leading underscore in udt_name marks an array type
		if udt.startswith('_'):
			types[name] = udt[1:] + '[]'
		else:
			types[name] = udt
	return [names, types]
605
607 """Return column attributes of table"""
608 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
609 cols = []
610 for row in rows:
611 cols.append(row[0])
612 return cols
613
615 rows, idx = run_ro_queries (
616 queries = [{'cmd': u'select distinct lang from i18n.translations'}]
617 )
618 return [ r[0] for r in rows ]
619
621 cmd = u'select i18n.get_curr_lang()'
622 rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
623 return rows[0][0]
624
626 """Set the user language in the database.
627
628 user = None: current db user
629 language = None: unset
630 """
631 _log.info('setting database language for user [%s] to [%s]', user, language)
632
633 args = {
634 'usr': user,
635 'lang': language
636 }
637
638 if language is None:
639 if user is None:
640 queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
641 else:
642 queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
643 queries.append({'cmd': u'select True'})
644 else:
645 if user is None:
646 queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
647 else:
648 queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]
649
650 rows, idx = run_rw_queries(queries = queries, return_data = True)
651
652 if not rows[0][0]:
653 _log.error('cannot set database language to [%s] for user [%s]', language, user)
654
655 return rows[0][0]
656
658 """Set the user language in the database.
659
660 - regardless of whether there is any translation available.
661 - only for the current user
662 """
663 _log.info('forcing database language for current db user to [%s]', language)
664
665 run_rw_queries(queries = [{
666 'cmd': u'select i18n.force_curr_lang(%(lang)s)',
667 'args': {'lang': language}
668 }])
669
670
671 text_expansion_keywords = None
672
674 global text_expansion_keywords
675 if text_expansion_keywords is not None:
676 return text_expansion_keywords
677
678 cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
679 rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
680 text_expansion_keywords = rows
681
682 _log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
683
684 return text_expansion_keywords
685
687
688
689 if keyword == u'$$steffi':
690 return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'
691
692 cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
693 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
694
695 if len(rows) == 0:
696 return None
697
698 return rows[0]['expansion']
699
701
702 if keyword is None:
703 return []
704
705 get_text_expansion_keywords()
706
707 candidates = []
708 for kwd in text_expansion_keywords:
709 if kwd['keyword'].startswith(keyword):
710 candidates.append(kwd['keyword'])
711
712 return candidates
713
def add_text_expansion(keyword=None, expansion=None, public=None):
	"""Register a new text expansion for <keyword>.

	public -- if true, create a public expansion (fk_staff is NULL),
		otherwise a private one owned by the current staff member

	Returns False if a matching expansion already exists,
	True after the new expansion has been stored.
	"""
	# pick both the duplicate check and the insert statement
	# depending on the requested scope
	if public:
		check_cmd = u"select 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
		insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
	else:
		check_cmd = u"select 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
		insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

	rows, idx = run_ro_queries(queries = [{'cmd': check_cmd, 'args': {'kwd': keyword}}])
	if len(rows) != 0:
		# an expansion for that keyword already exists in this scope
		return False

	rows, idx = run_rw_queries(queries = [{'cmd': insert_cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

	# invalidate cache so it is reloaded on next access
	global text_expansion_keywords
	text_expansion_keywords = None

	return True
740
742 cmd = u"""
743 delete from clin.keyword_expansion where
744 keyword = %(kwd)s and (
745 (fk_staff = (select pk from dem.staff where db_user = current_user))
746 or
747 (fk_staff is null and owner = current_user)
748 )"""
749 rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
750
751 global text_expansion_keywords
752 text_expansion_keywords = None
753
def edit_text_expansion(keyword, expansion):
	"""Replace the current user's private expansion for <keyword>.

	Implemented as delete-then-insert, run as one transaction.
	"""
	delete_cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and
	fk_staff = (select pk from dem.staff where db_user = current_user)"""

	insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

	queries = [
		{'cmd': delete_cmd, 'args': {'kwd': keyword}},
		{'cmd': insert_cmd, 'args': {'kwd': keyword, 'exp': expansion}}
	]
	rows, idx = run_rw_queries(queries = queries)

	# invalidate cache so it is reloaded on next access
	global text_expansion_keywords
	text_expansion_keywords = None
772
773
774
776 cmd = u'notify "db_maintenance_warning:"'
777 run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
778
780 cmd = u'notify "db_maintenance_disconnect:"'
781 run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
782
784 cmd = u'select %(candidate)s::interval'
785 try:
786 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
787 return True
788 except:
789 cmd = u'select %(candidate)s::text::interval'
790 try:
791 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
792 return True
793 except:
794 return False
795
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Retrieve bytea data and store it into the file <filename>.

	Thin wrapper around bytea2file_object(), see there for the
	semantics of the arguments.

	Returns the result of bytea2file_object().
	"""
	outfile = open(filename, 'wb')
	try:
		# try/finally so the file handle does not leak if the
		# database retrieval fails half way through
		result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	finally:
		outfile.close()
	return result
801
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file>
	- must be a file like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# a read-only connection suffices, a dedicated raw connection is
	# used so the chunked retrieval does not interfere with whatever
	# transaction the caller may have going on
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
		if data_size in [None, 0]:
			# nothing to retrieve
			conn.rollback()
			return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk size 0 means "all at once"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# retrieve full-sized chunks; NOTE: the 'start'/'size' keys of
	# data_query['args'] are mutated in place on each iteration
	for chunk_id in range(needed_chunks):
		# SQL substring() is 1-based, hence the +1
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise

		# str() converts the psycopg2 buffer into a byte string
		file_obj.write(str(rows[0][0]))

	# retrieve the trailing partial chunk, if any
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise

		file_obj.write(str(rows[0][0]))

	# read-only work, rollback rather than commit
	conn.rollback()
	return True
893
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea

	conn -- a read-write connection to run the query on; if None
		a raw connection is created (and closed) internally

	Note: reads the entire file into memory, so this will not
	work for files larger than available memory.
	"""
	infile = open(filename, "rb")
	try:
		# try/finally so the file handle does not leak on read errors
		data_as_byte_string = infile.read()
	finally:
		infile.close()
	if args is None:
		args = {}
	# wrap in buffer() so psycopg2 transmits the data as binary (bytea)
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	try:
		run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
	finally:
		# do not leak internally created connections if the query fails
		if close_conn:
			conn.close()

	return
925
927 """Escape input for use in a PostgreSQL regular expression.
928
929 If a fragment comes from user input and is to be used
930 as a regular expression we need to make sure it doesn't
931 contain invalid regex patterns such as unbalanced ('s.
932
933 <escape_all>
934 True: try to escape *all* metacharacters
935 False: only escape those which render the regex invalid
936 """
937 return expression.replace (
938 '(', '\('
939 ).replace (
940 ')', '\)'
941 ).replace (
942 '[', '\['
943 ).replace (
944 '+', '\+'
945 ).replace (
946 '.', '\.'
947 ).replace (
948 '*', '\*'
949 )
950
951
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<link_obj>
	- a cursor: queries are run on it, nothing is closed/rolled back
	- a connection: a cursor is created (and closed) on it, the
	  transaction is rolled back when done
	- None: a (pooled) read-only connection is used

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]

	<return_data> - if true, return fetchall() of the last query
	<get_col_idx> - if true, also return a dict mapping column
	  names to column positions in the result rows

	Returns a tuple (data, col_idx).
	"""
	# set up cursor plus cleanup callables appropriate to the
	# type of <link_obj>; __noop is used where cleanup must not
	# touch caller-owned objects
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		# queries are expected to be unicode, warn loudly if not
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except:
			# clean up as far as we own the objects, then re-raise
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()
			_log.error('query failed: [%s]', curs.query)
			_log.error('PG status message: %s', curs.statusmessage)
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	# read-only: roll back rather than commit
	curs_close()
	tx_rollback()
	return (data, col_idx)
1019
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"select currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
		* (None, None) if last query did not return rows
		* ("fetchall() result", <index>) if last query returned any rows
		* for <index> see <get_col_idx>
	"""
	# set up cursor plus cleanup/finalization callables appropriate
	# to the type of <link_obj>; __noop is used wherever we must not
	# touch caller-owned objects or finalize a framing transaction
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		conn_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		# only finalize the transaction if the caller says we own it
		if end_tx:
			conn_commit = link_obj.commit
			conn_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			conn_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		conn_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		# queries are expected to be unicode, warn loudly if not
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except:
			# clean up as far as we own the objects, then re-raise
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			# clean up as far as we own the objects, then re-raise
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	# success path: commit (no-ops unless we own the transaction)
	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
1138
1139
1140
1142 """
1143 GNUmed database connection pool.
1144
1145 Extends psycopg2's PersistentConnectionPool with
1146 a custom _connect() function. Supports one connection
1147 per thread - which also ties it to one particular DSN.
1148 """
1149
1151
1152 conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)
1153
1154 conn.original_close = conn.close
1155 conn.close = _raise_exception_on_ro_conn_close
1156
1157 if key is not None:
1158 self._used[key] = conn
1159 self._rused[id(conn)] = key
1160 else:
1161 self._pool.append(conn)
1162
1163 return conn
1164
1166 for conn_key in self._used.keys():
1167 _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
1168 self._used[conn_key].original_close()
1169
1171 """Get a raw, unadorned connection.
1172
1173 - this will not set any parameters such as encoding, timezone, datestyle
1174 - the only requirement is a valid DSN
1175 - hence it can be used for "service" connections
1176 for verifying encodings etc
1177 """
1178
1179 if dsn is None:
1180 dsn = get_default_dsn()
1181
1182 try:
1183 conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
1184 except dbapi.OperationalError, e:
1185
1186 t, v, tb = sys.exc_info()
1187 try:
1188 msg = e.args[0]
1189 except (AttributeError, IndexError, TypeError):
1190 raise
1191
1192 msg = unicode(msg, gmI18N.get_encoding(), 'replace')
1193
1194 if msg.find('fe_sendauth') != -1:
1195 raise cAuthenticationError, (dsn, msg), tb
1196
1197 if regex.search('user ".*" does not exist', msg) is not None:
1198 raise cAuthenticationError, (dsn, msg), tb
1199
1200 if msg.find('uthenti') != -1:
1201 raise cAuthenticationError, (dsn, msg), tb
1202
1203 raise
1204
1205 _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)
1206
1207
1208 global postgresql_version
1209 if postgresql_version is None:
1210 curs = conn.cursor()
1211 curs.execute ("""
1212 select
1213 (split_part(setting, '.', 1) || '.' || split_part(setting, '.', 2))::numeric as version
1214 from pg_settings
1215 where name='server_version'"""
1216 )
1217 postgresql_version = curs.fetchone()['version']
1218 _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
1219 try:
1220 curs.execute("select pg_size_pretty(pg_database_size(current_database()))")
1221 _log.info('database size: %s', curs.fetchone()[0])
1222 except:
1223 pass
1224 if verbose:
1225 __log_PG_settings(curs=curs)
1226 curs.close()
1227 conn.commit()
1228
1229 if _default_client_timezone is None:
1230 __detect_client_timezone(conn = conn)
1231
1232 curs = conn.cursor()
1233
1234
1235 if readonly:
1236 _log.debug('access mode [READ ONLY]')
1237 cmd = 'set session characteristics as transaction READ ONLY'
1238 curs.execute(cmd)
1239 cmd = 'set default_transaction_read_only to on'
1240 curs.execute(cmd)
1241 else:
1242 _log.debug('access mode [READ WRITE]')
1243 cmd = 'set session characteristics as transaction READ WRITE'
1244 curs.execute(cmd)
1245 cmd = 'set default_transaction_read_only to off'
1246 curs.execute(cmd)
1247
1248 curs.close()
1249 conn.commit()
1250
1251 conn.is_decorated = False
1252
1253 return conn
1254
1255 -def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
1256 """Get a new connection.
1257
1258 This assumes the locale system has been initialzied
1259 unless an encoding is specified.
1260 """
1261
1262
1263 if pooled and readonly and (dsn is None):
1264 global __ro_conn_pool
1265 if __ro_conn_pool is None:
1266 __ro_conn_pool = cConnectionPool (
1267 minconn = 1,
1268 maxconn = 2,
1269 dsn = dsn,
1270 verbose = verbose
1271 )
1272 conn = __ro_conn_pool.getconn()
1273 else:
1274 conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)
1275
1276 if conn.is_decorated:
1277 return conn
1278
1279 if encoding is None:
1280 encoding = _default_client_encoding
1281 if encoding is None:
1282 encoding = gmI18N.get_encoding()
1283 _log.warning('client encoding not specified')
1284 _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
1285 _log.warning('for this to work properly the application MUST have called locale.setlocale() before')
1286
1287
1288
1289 try:
1290 conn.set_client_encoding(encoding)
1291 except dbapi.OperationalError:
1292 t, v, tb = sys.exc_info()
1293 if str(v).find("can't set encoding to") != -1:
1294 raise cEncodingError, (encoding, v), tb
1295 raise
1296
1297
1298 if readonly:
1299 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
1300 iso_level = u'read committed'
1301 else:
1302 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
1303 iso_level = u'serializable'
1304
1305 _log.debug('client string encoding [%s], isolation level [%s], time zone [%s], datestyle [ISO], sql_inheritance [ON]', encoding, iso_level, _default_client_timezone)
1306
1307 curs = conn.cursor()
1308
1309
1310 curs.execute(_sql_set_timezone, [_default_client_timezone])
1311
1312
1313
1314
1315 cmd = "set datestyle to 'ISO'"
1316 curs.execute(cmd)
1317
1318
1319 cmd = 'set sql_inheritance to on'
1320 curs.execute(cmd)
1321
1322
1323 global postgresql_version_string
1324 if postgresql_version_string is None:
1325 curs.execute('select version()')
1326 postgresql_version_string = curs.fetchone()['version']
1327 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)
1328
1329 curs.close()
1330 conn.commit()
1331
1332 conn.is_decorated = True
1333
1334 return conn
1335
# NOTE(review): fragment -- the enclosing `def` header (original lines
# ~1343-1346 are missing from this listing) was elided; judging by the
# message below, this raise presumably replaces .close() on read-only
# (pooled) connections so callers cannot close them -- confirm against
# upstream before editing.
raise TypeError(u'close() called on read-only connection')
1350 """Check server time and local time to be within
1351 the given tolerance of each other.
1352
1353 tolerance: seconds
1354 """
1355 _log.debug('maximum skew tolerance (seconds): %s', tolerance)
1356
1357 cmd = u"select now() at time zone 'UTC'"
1358 conn = get_raw_connection(readonly=True)
1359 curs = conn.cursor()
1360
1361 start = time.time()
1362 rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
1363 end = time.time()
1364 client_now_as_utc = pydt.datetime.utcnow()
1365
1366 curs.close()
1367 conn.commit()
1368
1369 server_now_as_utc = rows[0][0]
1370 query_duration = end - start
1371 _log.info('server "now" (UTC): %s', server_now_as_utc)
1372 _log.info('client "now" (UTC): %s', client_now_as_utc)
1373 _log.debug('wire roundtrip (seconds): %s', query_duration)
1374
1375 if query_duration > tolerance:
1376 _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
1377 return False
1378
1379 if server_now_as_utc > client_now_as_utc:
1380 real_skew = server_now_as_utc - client_now_as_utc
1381 else:
1382 real_skew = client_now_as_utc - server_now_as_utc
1383
1384 _log.debug('client/server time skew: %s', real_skew)
1385
1386 if real_skew > pydt.timedelta(seconds = tolerance):
1387 _log.error('client/server time skew > tolerance')
1388 return False
1389
1390 return True
1391
1393 """Checks database settings.
1394
1395 returns (status, message)
1396 status:
1397 0: no problem
1398 1: non-fatal problem
1399 2: fatal problem
1400 """
1401 _log.debug('checking database settings')
1402 settings = {
1403
1404 u'allow_system_table_mods': [u'off', u'system breakage', False],
1405 u'check_function_bodies': [u'on', u'suboptimal error detection', False],
1406 u'default_transaction_read_only': [u'on', u'accidental database writes', False],
1407 u'fsync': [u'on', u'data loss/corruption', True],
1408 u'full_page_writes': [u'on', u'data loss/corruption', False],
1409 u'lc_messages': [u'C', u'suboptimal error detection', False],
1410 u'password_encryption': [u'on', u'breach of confidentiality', False],
1411 u'regex_flavor': [u'advanced', u'query breakage', False],
1412 u'synchronous_commit': [u'on', u'data loss/corruption', False],
1413 u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
1414 }
1415
1416 from Gnumed.pycommon import gmCfg2
1417 _cfg = gmCfg2.gmCfgData()
1418 if _cfg.get(option = u'hipaa'):
1419 settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
1420 settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
1421 else:
1422 settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
1423 settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]
1424
1425 cmd = u"select name, setting from pg_settings where name in %(settings)s"
1426 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'settings': tuple(settings.keys())}}])
1427
1428 found_error = False
1429 found_problem = False
1430 msg = []
1431 for row in rows:
1432 if row[1] != settings[row[0]][0]:
1433 if settings[row[0]][2] is True:
1434 found_error = True
1435 elif settings[row[0]][2] is False:
1436 found_problem = True
1437 elif settings[row[0]][2] is None:
1438 pass
1439 else:
1440 _log.error(settings[row[0]])
1441 raise ValueError(u'invalid database configuration sanity check')
1442 msg.append(_(' option [%s]: %s') % (row[0], row[1]))
1443 msg.append(_(' risk: %s') % settings[row[0]][1])
1444 _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (row[0], row[1], settings[row[0]][0], settings[row[0]][1]))
1445
1446 if found_error:
1447 return 2, u'\n'.join(msg)
1448
1449 if found_problem:
1450 return 1, u'\n'.join(msg)
1451
1452 return 0, u''
1453
def __log_PG_settings(curs=None):
    # NOTE(review): the original `def` line was elided from this listing; the
    # keyword signature is reconstructed from the call in get_raw_connection()
    # (`__log_PG_settings(curs=curs)`) -- confirm against upstream.
    """Log all current PostgreSQL settings via >>>show all<<<.

    curs -- an open cursor to run the query on

    Returns False when settings could not be retrieved, True otherwise.
    """
    try:
        curs.execute(u'show all')
    except:
        _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
        return False
    settings = curs.fetchall()
    if settings is None:
        _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
        return False
    for setting in settings:
        _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
    return True
1470
# NOTE(review): fragment -- the `def` header (original line ~1471) was elided
# from this listing; this is apparently a helper extracting a decoded unicode
# message from a psycopg2 exception `exc`.  Name and exact signature are not
# recoverable here -- confirm against upstream before editing.
try:
    msg = exc.args[0]
except (AttributeError, IndexError, TypeError):
    return u'cannot extract message from exception'

return unicode(msg, gmI18N.get_encoding(), 'replace')
1479
class cAuthenticationError(dbapi.OperationalError):
    # NOTE(review): the original `class` header was elided from this listing;
    # deriving from dbapi.OperationalError is presumed from the way this class
    # substitutes for that exception in get_raw_connection() -- confirm.
    """Raised when connecting fails for authentication-related reasons.

    dsn      -- the DSN used in the failed connection attempt
    prev_val -- the message extracted from the underlying psycopg2 error
    """

    def __init__(self, dsn=None, prev_val=None):
        self.dsn = dsn
        self.prev_val = prev_val

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
        _log.error(tmp)
        # py2 __str__ must return bytes, hence the encode
        return tmp.encode(gmI18N.get_encoding(), 'replace')

    def __unicode__(self):
        return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
class cEncodingError(dbapi.OperationalError):
    # NOTE(review): the original `class` header was elided from this listing;
    # deriving from dbapi.OperationalError is presumed (parallel to
    # cAuthenticationError above) -- confirm against upstream.
    """Raised when the client encoding cannot be set on a connection.

    encoding -- the encoding that could not be set
    prev_val -- the message of the underlying psycopg2 error
    """

    def __init__(self, encoding=None, prev_val=None):
        self.encoding = encoding
        self.prev_val = prev_val

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        # py2 __str__ must return bytes, hence the encodes
        return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

    def __unicode__(self):
        return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
# NOTE(review): fragment -- the class and method headers of the datetime
# adapter classes (registered further below in this file as cAdapterPyDateTime
# and cAdapterMxDateTime) were elided from this listing; only parts of their
# __init__ and quoting bodies remain.  Do not edit without the full upstream
# source.

# -- presumably cAdapterPyDateTime.__init__(self, dt): reject naive datetimes
if dt.tzinfo is None:
    raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
self.__dt = dt

# -- presumably cAdapterMxDateTime.__init__(self, dt): mx.DateTime values
#    without a usable tz string are assumed to be local time
if dt.tz == '???':
    _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
self.__dt = dt

# -- presumably part of cAdapterMxDateTime's quoting: mx.DateTime uses a
#    comma as the second fraction separator, PostgreSQL expects a dot
return mxDT.ISO.str(self.__dt).replace(',', '.')
1553
1554 try:
1555 return dbapi.DATETIME(string_value, cursor)
1556 except (dbapi.DataError,), exc:
1557 _log.error('unable to parse [%s]' % string_value)
1558
1559 if string_value is None:
1560 raise
1561
1562 if exc.message != "unable to parse time":
1563 raise
1564
1565 _log.debug('unable to parse as <timestamp with time zone>')
1566
1567 if regex.match('(\+|-)\d\d:\d\d:\d\d', string_value[-9:]) is None:
1568 raise
1569
1570 if regex.match('-\d\d:\d\d:\d\d', string_value[-9:]) is not None:
1571 if string_value[-5:-3] != '00':
1572 _log.debug('psycopg2 versions < 2.0.8 may misinterpret this time zone: [%s]', string_value[-9:])
1573
1574
1575
1576 _log.debug('time zone with seconds detected (true local time ?): %s', string_value[-9:])
1577 truncated_string_value = string_value[:-3]
1578 _log.warning('truncating to [%s] and trying again', truncated_string_value)
1579 _log.warning('value will be off by %s seconds', string_value[-2:])
1580 return dbapi.DATETIME(truncated_string_value, cursor)
1581
1582
# module-level psycopg2 type/adapter registration

# sanity check: the custom caster below is registered for the well-known
# OID of <timestamp with time zone>
TIMESTAMPTZ_OID = 1184
if TIMESTAMPTZ_OID not in dbapi.DATETIME.values:
    raise ImportError('TIMESTAMPTZ_OID <%s> not in psycopg2.DATETIME.values [%s]' % (TIMESTAMPTZ_OID, dbapi.DATETIME.values))

DT_W_ODD_TZ = psycopg2.extensions.new_type((TIMESTAMPTZ_OID,), 'DT_W_ODD_TZ', convert_ts_with_odd_tz)
# NOTE(review): no psycopg2.extensions.register_type(DT_W_ODD_TZ) call is
# visible in this chunk -- without one the caster above is never activated;
# confirm it exists elsewhere in the file.

# have psycopg2 return unicode for textual types
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# adapt outgoing datetime values (mx.DateTime support is optional)
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)
try:
    import mx.DateTime as mxDT
    psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
except ImportError:
    _log.warning('cannot import mx.DateTime')
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
if __name__ == "__main__":

    logging.basicConfig(level=logging.DEBUG)

    # NOTE(review): throughout this test harness the `def test_...():` header
    # lines were elided from this listing (see the stray `return status`
    # further below, which can only be legal inside a function) -- what
    # follows are the concatenated *bodies* of the individual test functions;
    # confirm the structure against upstream before editing.

    # --- fragment: bytea round-trip test body ---
    run_rw_queries(queries = [
        {'cmd': u'create table test_bytea (data bytea)'}
    ])

    cmd = u'insert into test_bytea values (%(data)s::bytea)'
    try:
        file2bytea(query = cmd, filename = sys.argv[2])
    except:
        _log.exception('error')

    run_rw_queries(queries = [
        {'cmd': u'drop table test_bytea'}
    ])

    # --- fragment: get_connection() test body ---
    print "testing get_connection()"

    dsn = 'foo'
    try:
        conn = get_connection(dsn=dsn)
    except dbapi.OperationalError, e:
        print "SUCCESS: get_connection(%s) failed as expected" % dsn
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    dsn = 'dbname=gnumed_v9'
    try:
        conn = get_connection(dsn=dsn)
    except cAuthenticationError:
        print "SUCCESS: get_connection(%s) failed as expected" % dsn
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    dsn = 'dbname=gnumed_v9 user=abc'
    try:
        conn = get_connection(dsn=dsn)
    except cAuthenticationError:
        print "SUCCESS: get_connection(%s) failed as expected" % dsn
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    dsn = 'dbname=gnumed_v9 user=any-doc'
    try:
        conn = get_connection(dsn=dsn)
    except cAuthenticationError:
        print "SUCCESS: get_connection(%s) failed as expected" % dsn
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
    try:
        conn = get_connection(dsn=dsn)
    except cAuthenticationError:
        print "SUCCESS: get_connection(%s) failed as expected" % dsn
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
    conn = get_connection(dsn=dsn, readonly=True)

    dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
    conn = get_connection(dsn=dsn, readonly=False)

    dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
    encoding = 'foo'
    try:
        conn = get_connection(dsn=dsn, encoding=encoding)
    except cEncodingError:
        print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    # --- fragment: exception classes test body ---
    print "testing exceptions"

    try:
        raise cAuthenticationError('no dsn', 'no previous exception')
    except cAuthenticationError:
        t, v, tb = sys.exc_info()
        print t
        print v
        print tb

    try:
        raise cEncodingError('no dsn', 'no previous exception')
    except cEncodingError:
        t, v, tb = sys.exc_info()
        print t
        print v
        print tb

    # --- fragment: run_ro_queries() test body ---
    print "testing run_ro_queries()"

    dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
    conn = get_connection(dsn, readonly=True)

    data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
    print data
    print idx
    data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True)
    print data
    print idx

    curs = conn.cursor()

    data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
    print data
    print idx

    data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True, verbose=True)
    print data
    print idx

    try:
        data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
        print data
        print idx
    except psycopg2.ProgrammingError:
        print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    curs.close()

    # --- fragment: set_default_client_encoding() test body ---
    print "testing set_default_client_encoding()"

    enc = 'foo'
    try:
        set_default_client_encoding(enc)
        print "SUCCESS: encoding [%s] worked" % enc
    except ValueError:
        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    enc = ''
    try:
        set_default_client_encoding(enc)
        print "SUCCESS: encoding [%s] worked" % enc
    except ValueError:
        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    enc = 'latin1'
    try:
        set_default_client_encoding(enc)
        print "SUCCESS: encoding [%s] worked" % enc
    except ValueError:
        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    enc = 'utf8'
    try:
        set_default_client_encoding(enc)
        print "SUCCESS: encoding [%s] worked" % enc
    except ValueError:
        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    enc = 'unicode'
    try:
        set_default_client_encoding(enc)
        print "SUCCESS: encoding [%s] worked" % enc
    except ValueError:
        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    enc = 'UNICODE'
    try:
        set_default_client_encoding(enc)
        print "SUCCESS: encoding [%s] worked" % enc
    except ValueError:
        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
        t, v = sys.exc_info()[:2]
        print ' ', t
        print ' ', v

    # --- fragment: read-only query on default DSN test body ---
    dsn = get_default_dsn()
    conn = get_connection(dsn, readonly=True)
    curs = conn.cursor()
    curs.execute('select * from clin.clin_narrative where narrative = %s', ['a'])

    # --- fragment: sanitize_pg_regex() test body ---
    tests = [
        ['(', '\\(']
        , ['[', '\\[']
        , [')', '\\)']
    ]
    for test in tests:
        result = sanitize_pg_regex(test[0])
        if result != test[1]:
            print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])

    # --- fragment: is_pg_interval() test body (note the `return status`
    #     below: this clearly was the body of a function) ---
    status = True
    tests = [
        [None, True],
        [1, True],
        ['1', True],
        ['abc', False]
    ]

    if not is_pg_interval():
        print 'ERROR: is_pg_interval() returned "False", expected "True"'
        status = False

    for test in tests:
        result = is_pg_interval(test[0])
        if result != test[1]:
            print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
            status = False

    return status

    # --- fragment: foreign key introspection test body ---
    for row in get_foreign_keys2column (
        schema = u'dem',
        table = u'identity',
        column = u'pk'
    ):
        print '%s.%s references %s.%s.%s' % (
            row['referencing_table'],
            row['referencing_column'],
            row['referenced_schema'],
            row['referenced_table'],
            row['referenced_column']
        )

    # --- fragment: set_user_language() test body ---
    tests = [
        # [user, language, expected result]
        [None, 'de_DE', True],
        [None, 'lang_w/o_tx', False],
        [None, None, True],

        ['any-doc', 'de_DE', True],
        ['any-doc', 'lang_w/o_tx', False],
        ['any-doc', None, True],

        ['invalid user', 'de_DE', None],
        ['invalid user', 'lang_w/o_tx', False],
        ['invalid user', None, True]
    ]
    for test in tests:
        try:
            result = set_user_language(user = test[0], language = test[1])
            if result != test[2]:
                print "test:", test
                print "result:", result, "expected:", test[2]
        except psycopg2.IntegrityError, e:
            if test[2] is None:
                continue
            print "test:", test
            print "expected exception"
            print "result:", e

    # --- fragment: schema revision history test body ---
    for line in get_schema_revision_history():
        print u' - '.join(line)

    # test driver -- only the history test call survives in this listing,
    # the other test_*() calls were apparently elided
    if len(sys.argv) > 1 and sys.argv[1] == 'test':

        test_get_schema_revision_history()
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000
2001
2002
2003
2004
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055
2056
2057
2058
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
2159
2160
2161
2162
2163
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188
2189
2190
2191
2192
2193
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214
2215
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243
2244
2245
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298
2299
2300
2301
2302
2303
2304
2305
2306
2307
2308
2309
2310
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320
2321
2322
2323
2324
2325
2326
2327
2328
2329
2330
2331
2332
2333
2334
2335
2336
2337
2338
2339
2340
2341
2342
2343
2344
2345
2346
2347
2348
2349
2350
2351
2352
2353
2354
2355
2356
2357
2358
2359
2360
2361
2362
2363
2364
2365
2366
2367
2368
2369
2370
2371
2372
2373
2374
2375
2376
2377
2378
2379
2380
2381
2382
2383
2384
2385
2386
2387
2388
2389
2390
2391
2392
2393
2394
2395
2396
2397
2398