 except ImportError:
     raise ImportError("You must have psycopg2 or pg8000 modules installed")
 
-
 bound_ports = set()
 registered_nodes = []
 util_threads = []
 
 
 class ClusterException(Exception):
-
     """
     Predefined exceptions
     """
     pass
 
 
 class QueryException(Exception):
-
     """
     Predefined exceptions
     """
     pass
 
 
 class InitPostgresNodeException(Exception):
-
     """
     Predefined exceptions
     """
@@ -138,12 +134,16 @@ def log_watch(node_name, pg_logname):
 
 
 class NodeConnection(object):
-
     """
     Transaction wrapper returned by Node
     """
 
-    def __init__(self, parent_node, dbname, host="127.0.0.1", user=None, password=None):
+    def __init__(self,
+                 parent_node,
+                 dbname,
+                 host="127.0.0.1",
+                 user=None,
+                 password=None):
         self.parent_node = parent_node
         if user is None:
             user = get_username()
@@ -152,8 +152,7 @@ def __init__(self, parent_node, dbname, host="127.0.0.1", user=None, password=No
             user=user,
             port=parent_node.port,
             host=host,
-            password=password
-        )
+            password=password)
 
         self.cursor = self.connection.cursor()
 
@@ -164,10 +163,13 @@ def __exit__(self, type, value, tb):
         self.connection.close()
 
     def begin(self, isolation_level=0):
-        levels = ['read uncommitted',
-                  'read committed',
-                  'repeatable read',
-                  'serializable']
+        # yapf: disable
+        levels = [
+            'read uncommitted',
+            'read committed',
+            'repeatable read',
+            'serializable'
+        ]
 
         # Check if level is int [0..3]
         if (isinstance(isolation_level, int) and
@@ -185,11 +187,11 @@ def begin(self, isolation_level=0):
 
         # Something is wrong, emit exception
         else:
-            raise QueryException('Invalid isolation level "{}"'.format(
-                isolation_level))
+            raise QueryException(
+                'Invalid isolation level "{}"'.format(isolation_level))
 
-        self.cursor.execute('SET TRANSACTION ISOLATION LEVEL {}'.format(
-            isolation_level))
+        self.cursor.execute(
+            'SET TRANSACTION ISOLATION LEVEL {}'.format(isolation_level))
 
     def commit(self):
         self.connection.commit()
@@ -210,14 +212,13 @@ def close(self):
 
 
 class PostgresNode(object):
-
     def __init__(self, name, port, base_dir=None, use_logging=False):
         global bound_ports
 
         # check that port is not used
         if port in bound_ports:
             raise InitPostgresNodeException(
-                'port {} is already in use'.format(port))
+                'port {} is already in use'.format(port))
 
         # mark port as used
         bound_ports.add(port)
@@ -289,8 +290,10 @@ def initdb(self, directory, initdb_params=[]):
                 stderr=subprocess.STDOUT)
 
         if ret:
-            raise ClusterException("Cluster initialization failed. You"
-                " can find additional information at '%s'" % initdb_logfile)
+            raise ClusterException(
+                "Cluster initialization failed. You"
+                " can find additional information at '%s'" %
+                initdb_logfile)
 
     def _setup_data_dir(self, data_dir):
         global base_data_dir
@@ -302,7 +305,6 @@ def _setup_data_dir(self, data_dir):
 
         shutil.copytree(base_data_dir, data_dir)
 
-
     def init(self, allows_streaming=False, initdb_params=[]):
         """ Performs initdb """
 
@@ -322,24 +324,22 @@ def init(self, allows_streaming=False, initdb_params=[]):
 
         # add parameters to config file
         with open(postgres_conf, "w") as conf:
+            conf.write("fsync = off\n"
+                       "log_statement = all\n"
+                       "port = {}\n".format(self.port))
             conf.write(
-                "fsync = off\n"
-                "log_statement = all\n"
-                "port = {}\n".format(self.port))
-            conf.write(
-                # "unix_socket_directories = '%s'\n"
-                # "listen_addresses = ''\n";)
+                # "unix_socket_directories = '%s'\n"
+                # "listen_addresses = ''\n";)
                 "listen_addresses = '{}'\n".format(self.host))
 
         if allows_streaming:
             # TODO: wal_level = hot_standby (9.5)
-            conf.write(
-                "max_wal_senders = 5\n"
-                "wal_keep_segments = 20\n"
-                "max_wal_size = 128MB\n"
-                "wal_log_hints = on\n"
-                "hot_standby = on\n"
-                "max_connections = 10\n")
+            conf.write("max_wal_senders = 5\n"
+                       "wal_keep_segments = 20\n"
+                       "max_wal_size = 128MB\n"
+                       "wal_log_hints = on\n"
+                       "hot_standby = on\n"
+                       "max_connections = 10\n")
             if get_config().get("VERSION_NUM") < 906000:
                 conf.write("wal_level = hot_standby\n")
             else:
@@ -349,7 +349,10 @@ def init(self, allows_streaming=False, initdb_params=[]):
 
         return self
 
-    def init_from_backup(self, root_node, backup_name, has_streaming=False,
+    def init_from_backup(self,
+                         root_node,
+                         backup_name,
+                         has_streaming=False,
                          hba_permit_replication=True):
         """Initializes cluster from backup, made by another node"""
 
@@ -359,10 +362,7 @@ def init_from_backup(self, root_node, backup_name, has_streaming=False,
         os.chmod(self.data_dir, 0o0700)
 
         # Change port in config file
-        self.append_conf(
-            "postgresql.conf",
-            "port = {}".format(self.port)
-        )
+        self.append_conf("postgresql.conf", "port = {}".format(self.port))
         # Enable streaming
         if hba_permit_replication:
             self.set_replication_conf()
@@ -378,9 +378,9 @@ def set_replication_conf(self):
     def enable_streaming(self, root_node):
         recovery_conf = os.path.join(self.data_dir, "recovery.conf")
         with open(recovery_conf, "a") as conf:
-            conf.write(
-                "primary_conninfo='{} application_name={}'\n"
-                "standby_mode=on\n".format(root_node.connstr, self.name))
+            conf.write("primary_conninfo='{} application_name={}'\n"
+                       "standby_mode=on\n".format(root_node.connstr,
                                                  self.name))
 
     def append_conf(self, filename, string):
         """Appends line to a config file like "postgresql.conf"
@@ -412,10 +412,7 @@ def pg_ctl(self, command, params={}, command_options=[]):
                 open(self.error_filename, "a") as file_err:
 
             res = subprocess.call(
-                arguments + command_options,
-                stdout=file_out,
-                stderr=file_err
-            )
+                arguments + command_options, stdout=file_out, stderr=file_err)
 
             if res > 0:
                 with open(self.error_filename, "r") as errfile:
@@ -427,7 +424,8 @@ def start(self, params={}):
         """ Starts cluster """
 
         if self.use_logging:
-            tmpfile = tempfile.NamedTemporaryFile('w', dir=self.logs_dir, delete=False)
+            tmpfile = tempfile.NamedTemporaryFile(
+                'w', dir=self.logs_dir, delete=False)
             logfile = tmpfile.name
 
             self.logger = log_watch(self.name, logfile)
@@ -454,8 +452,9 @@ def status(self):
         """
         try:
             res = subprocess.check_output([
-                self.get_bin_path("pg_ctl"), 'status', '-D', '{0}'.format(self.data_dir)
-            ])
+                self.get_bin_path("pg_ctl"), 'status', '-D',
+                '{0}'.format(self.data_dir)
+            ])
             return True
         except subprocess.CalledProcessError as e:
             if e.returncode == 3:
@@ -485,8 +484,7 @@ def get_control_data(self):
         try:
             lines = subprocess.check_output(
                 [pg_controldata] + ["-D", self.data_dir],
-                stderr=subprocess.STDOUT
-            ).decode("utf-8").splitlines()
+                stderr=subprocess.STDOUT).decode("utf-8").splitlines()
         except subprocess.CalledProcessError as e:
             raise PgcontroldataException(e.output, e.cmd)
 
@@ -497,10 +495,7 @@ def get_control_data(self):
 
     def stop(self, params={}):
         """ Stops cluster """
-        _params = {
-            "-D": self.data_dir,
-            "-w": None
-        }
+        _params = {"-D": self.data_dir, "-w": None}
         _params.update(params)
         self.pg_ctl("stop", _params)
 
@@ -513,10 +508,7 @@ def stop(self, params={}):
 
     def restart(self, params={}):
         """ Restarts cluster """
-        _params = {
-            "-D": self.data_dir,
-            "-w": None
-        }
+        _params = {"-D": self.data_dir, "-w": None}
         _params.update(params)
         self.pg_ctl("restart", _params)
 
@@ -554,7 +546,8 @@ def psql(self, dbname, query=None, filename=None, username=None):
         """
         psql = self.get_bin_path("psql")
         psql_params = [
-            psql, "-XAtq", "-h{}".format(self.host), "-p {}".format(self.port), dbname
+            psql, "-XAtq", "-h{}".format(self.host), "-p {}".format(self.port),
+            dbname
         ]
 
         if query:
@@ -570,10 +563,7 @@ def psql(self, dbname, query=None, filename=None, username=None):
 
         # start psql process
         process = subprocess.Popen(
-            psql_params,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE
-        )
+            psql_params, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 
         # wait untill it finishes and get stdout and stderr
         out, err = process.communicate()
@@ -594,10 +584,8 @@ def dump(self, dbname, filename):
         """Invoke pg_dump and exports database to a file as an sql script"""
         path = os.path.join(self.base_dir, filename)
         params = [
-            self.get_bin_path("pg_dump"),
-            "-p {}".format(self.port),
-            "-f", path,
-            dbname
+            self.get_bin_path("pg_dump"), "-p {}".format(self.port), "-f",
+            path, dbname
         ]
 
         with open(self.error_filename, "a") as file_err:
@@ -647,15 +635,13 @@ def backup(self, name):
         pg_basebackup = self.get_bin_path("pg_basebackup")
         backup_path = os.path.join(self.base_dir, name)
         os.makedirs(backup_path)
-        params = [pg_basebackup, "-D", backup_path, "-p {}".format(
-            self.port), "-X", "fetch"]
+        params = [
+            pg_basebackup, "-D", backup_path, "-p {}".format(self.port), "-X",
+            "fetch"
+        ]
         with open(self.output_filename, "a") as file_out, \
                 open(self.error_filename, "a") as file_err:
-            ret = subprocess.call(
-                params,
-                stdout=file_out,
-                stderr=file_err
-            )
+            ret = subprocess.call(params, stdout=file_out, stderr=file_err)
             if ret:
                 raise ClusterException("Base backup failed")
 
@@ -664,19 +650,12 @@ def backup(self, name):
     def pgbench_init(self, dbname='postgres', scale=1, options=[]):
         """Prepare pgbench database"""
         pgbench = self.get_bin_path("pgbench")
-        params = [
-            pgbench,
-            "-i",
-            "-s", "%i" % scale,
-            "-p", "%i" % self.port
-        ] + options + [dbname]
+        params = [pgbench, "-i", "-s",
+                  "%i" % scale, "-p",
+                  "%i" % self.port] + options + [dbname]
         with open(self.output_filename, "a") as file_out, \
                 open(self.error_filename, "a") as file_err:
-            ret = subprocess.call(
-                params,
-                stdout=file_out,
-                stderr=file_err
-            )
+            ret = subprocess.call(params, stdout=file_out, stderr=file_err)
             if ret:
                 raise ClusterException("pgbench init failed")
 
@@ -685,15 +664,8 @@ def pgbench_init(self, dbname='postgres', scale=1, options=[]):
     def pgbench(self, dbname='postgres', stdout=None, stderr=None, options=[]):
         """Make pgbench process"""
         pgbench = self.get_bin_path("pgbench")
-        params = [
-            pgbench,
-            "-p", "%i" % self.port
-        ] + options + [dbname]
-        proc = subprocess.Popen(
-            params,
-            stdout=stdout,
-            stderr=stderr
-        )
+        params = [pgbench, "-p", "%i" % self.port] + options + [dbname]
+        proc = subprocess.Popen(params, stdout=stdout, stderr=stderr)
 
         return proc
 
@@ -715,7 +687,8 @@ def get_config():
 
         try:
             out = six.StringIO(
-                subprocess.check_output([pg_config_cmd], universal_newlines=True))
+                subprocess.check_output(
+                    [pg_config_cmd], universal_newlines=True))
             for line in out:
                 if line and "=" in line:
                     key, value = line.split("=", 1)
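
For orientation, here is a brief usage sketch of the classes touched by this reformatting pass. It relies only on names that appear in the hunks above (PostgresNode, NodeConnection, begin, commit, close, psql, init, start, stop); the node name, port number, and queries are illustrative placeholders, not taken from the change itself.

    # Usage sketch (illustrative): spin up a node and run a transaction.
    node = PostgresNode(name="test_node", port=5433)  # port must not already be in use
    node.init()     # performs initdb and writes a basic postgresql.conf
    node.start()    # starts the cluster via pg_ctl

    # One-off query through the psql binary
    node.psql("postgres", query="SELECT 1")

    # NodeConnection is a transaction wrapper over psycopg2/pg8000
    con = NodeConnection(node, "postgres")
    con.begin(1)    # 1 maps to 'read committed' in the levels list above
    con.cursor.execute("SELECT version()")
    con.commit()
    con.close()

    node.stop()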