#!/usr/bin/env python

import re
import logging
import argparse as arg

import psycopg2 as pg
from psycopg2.extensions import AsIs

from classes.CustomFormater import CustomFormatter
from classes.Timer import Timer


# Schema names are limited to NAMEDATALEN-1 (63) characters, see
# src/include/pg_config_manual.h in the PostgreSQL sources.
def check_schema_name(arg_value, pat=re.compile(r"^[a-z0-9A-Z]{1,63}$")):
    if not pat.match(arg_value):
        raise ValueError('invalid schema name: {!r}'.format(arg_value))
    return arg_value
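# Example (illustrative values): check_schema_name('insee2023') returns the
# value unchanged, while check_schema_name('insee;--') raises ValueError,
# which argparse reports as an invalid --schema-name value.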


def parse_args():
    """
    Parse command line arguments
    """
    parser = arg.ArgumentParser(description='Process csv files from INSEE')

    parser.add_argument('--connection-file', '-f',
                        help='PostgreSQL connection file',
                        default='.pgconn'
                        )

    # Because AsIs() injects the schema name into SQL queries without quoting,
    # we must make sure the value contains no SQL special characters
    # (comment markers --, quotes, semicolons, ...) and cannot be used for
    # SQL injection.
    parser.add_argument('--schema-name',
                        help='Database schema name',
                        type=check_schema_name,
                        default='insee'
                        )

    debug_group = parser.add_mutually_exclusive_group()
    debug_group.add_argument('--verbose', '-V',
                             help='Verbose output',
                             action='store_true')
    debug_group.add_argument('--debug', '-d',
                             help='Activate debug mode',
                             action='store_true')
    return parser.parse_args()
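# Example invocation (illustrative): passing
#     --connection-file .pgconn --schema-name insee --verbose
# reads the connection string from .pgconn and logs messages at INFO level
# and above.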


if __name__ == '__main__':
    args = parse_args()

    logger = logging.getLogger()

    # Console handler using our custom formatter
    tty_handler = logging.StreamHandler()
    tty_handler.setFormatter(CustomFormatter())
    logger.addHandler(tty_handler)

    if args.verbose:
        logger.setLevel(logging.INFO)
        logger.info('VERBOSE mode activated')

    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug('DEBUG mode activated')

    t = Timer(logger=logger.info)
    logger.debug('Reading PostgreSQL connection file {}'.format(args.connection_file))
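    # Assumption (based on its use below): the connection file holds a libpq
    # connection string, e.g. "host=localhost dbname=insee user=insee", which
    # is passed unchanged to psycopg2.connect().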
    with open(args.connection_file) as cf:
        pg_conn = cf.read()

    t.start('Delete schema')
    logger.debug('Delete schema {}'.format(args.schema_name))
    conn = pg.connect(pg_conn)
    with conn.cursor() as curs:
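        # Alternative sketch (not used here): psycopg2's sql module (2.7+) can
        # quote identifiers itself, avoiding AsIs and the manual validation:
        #     from psycopg2 import sql
        #     curs.execute(sql.SQL('DROP SCHEMA IF EXISTS {} CASCADE')
        #                  .format(sql.Identifier(args.schema_name)))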
        try:
            curs.execute('DROP SCHEMA IF EXISTS %s CASCADE',
                         (AsIs(args.schema_name),))
            conn.commit()
        except Exception as e:
            # Roll back the failed transaction so the connection stays usable
            conn.rollback()
            logger.error("Can't drop schema {}: {}".format(args.schema_name, e))

    conn.close()
    t.stop()