Compare commits
No commits in common. "9cd33c4818549f2d5bf0e66eefa39ca1c1f0ee16" and "87402c06fc55c0767579a1c0a8b3e5b18acb5b13" have entirely different histories.
9cd33c4818 ... 87402c06fc
3 changed files with 98 additions and 224 deletions
create_view.py (134 lines deleted)

@ -1,134 +0,0 @@
#!/usr/bin/env python
import sys
import os
import re
# import time
import logging
import argparse as arg
import psycopg2 as pg
from psycopg2.extensions import AsIs

from classes.CustomFormater import CustomFormatter
from classes.Timer import Timer


import locale
locale.setlocale(locale.LC_ALL, 'fr_FR.UTF-8')


# Schema name is NAMEDATALEN-1 (PGSQL source code)
# -> src/include/pg_config_manual.h
def check_schema_name(arg_value, pat=re.compile(r"^[a-z0-9A-Z]{1,63}$")):
    if not pat.match(arg_value):
        raise ValueError
    return arg_value


def parse_args():
    """
    Parse arguments
    """
    parser = arg.ArgumentParser('Process csv files from INSEE')

    parser.add_argument('--source', '-s',
                        help='csv source directory',
                        default='exports')

    parser.add_argument('--connection-file', '-f',
                        help='PostgreSQL connection file',
                        default='.pgconn'
                        )

    # As we use AsIs to interpolate the schema name without quotes in our
    # SQL queries, we must ensure it contains no SQL special characters
    # (comments --, trailing commas, etc.) to prevent SQL injection.
    parser.add_argument('--schema-name',
                        help='Database schema name',
                        type=check_schema_name,
                        default='insee'
                        )

    parser.add_argument('--test', '-t',
                        help='test views, fetch 10 records from each',
                        action='store_true'
                        )
    debug_group = parser.add_mutually_exclusive_group()
    debug_group.add_argument('--verbose', '-V',
                             help='Verbose output',
                             action='store_true')
    debug_group.add_argument('--debug', '-d',
                             help='Activate debug mode',
                             action='store_true')
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()
    t = Timer()
    # logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger()
    tty_handler = logging.StreamHandler()

    # create console handler with a higher log level
    tty_handler.setFormatter(CustomFormatter())
    logger.addHandler(tty_handler)

    if args.verbose is True:
        logger.setLevel(logging.INFO)
        logger.info('VERBOSE mode activated')

    if args.debug is True:
        logger.setLevel(logging.DEBUG)
        logger.debug('DEBUG mode activated')

    logging.debug('Import pgsql connection file {}'.format(args.connection_file))
    with open(args.connection_file) as cf:
        pg_conn = cf.read()

    t.start('Add Views')
    conn = pg.connect(pg_conn)
    with conn.cursor() as curs:

        logger.debug('Add view_indicateur_dep view')
        try:
            curs.execute("""
                CREATE OR REPLACE VIEW %(schema)s.view_indicateur_dep AS
                SELECT id_departement, d.dep, d.ncc, s.id_indicateur, s.date_debut, SUM(s.valeur)
                FROM %(schema)s.departement d
                INNER JOIN %(schema)s.commune c ON c.dep = d.dep
                INNER JOIN %(schema)s.statistique s ON s.com = c.com
                GROUP BY id_departement, d.dep, s.date_debut, d.ncc, s.id_indicateur
                ORDER BY id_indicateur;""",
                {'schema': AsIs(args.schema_name)})
            conn.commit()
        except Exception as e:
            logger.error('Can\'t add view view_indicateur_dep: {}'.format(e))
            sys.exit(1)

        logger.debug('Add view_indicateur_reg view')
        try:
            curs.execute("""
                CREATE OR REPLACE VIEW %(schema)s.view_indicateur_reg AS
                SELECT id_region, r.reg, r.ncc, s.id_indicateur, s.date_debut, SUM(s.valeur)
                FROM %(schema)s.region r
                INNER JOIN %(schema)s.departement d ON d.reg = r.reg
                INNER JOIN %(schema)s.commune c ON c.dep = d.dep
                INNER JOIN %(schema)s.statistique s ON s.com = c.com
                GROUP BY id_region, r.reg, s.date_debut, r.ncc, s.id_indicateur
                ORDER BY id_indicateur;""",
                {'schema': AsIs(args.schema_name)})
            conn.commit()
        except Exception as e:
            logger.error('Can\'t add view view_indicateur_reg: {}'.format(e))
            sys.exit(1)
        t.stop()

        if args.test:
            for view in ('view_indicateur_dep', 'view_indicateur_reg'):
                curs.execute("""
                    SELECT v.ncc, v.date_debut, v.sum
                    FROM %(schema)s.%(view)s v LIMIT 10;
                    """,
                    {'schema': AsIs(args.schema_name), 'view': AsIs(view)})
                data = curs.fetchall()
                print('\nCheck {}:'.format(view))
                for row in data:
                    print('\t{:.<40}{}: {:>7n}'.format(*row))
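
Note, not part of the diff: the deleted script interpolates the schema name with psycopg2's AsIs, which performs no escaping, so the regex in check_schema_name is the only guard against SQL injection. For comparison, psycopg2's sql module composes identifiers with proper quoting; a minimal sketch, with a placeholder DSN and schema name:

# Sketch only: safe identifier interpolation with psycopg2.sql
# instead of AsIs. The DSN and schema name below are hypothetical.
import psycopg2
from psycopg2 import sql

conn = psycopg2.connect('dbname=insee')  # placeholder DSN
with conn.cursor() as curs:
    # sql.Identifier() double-quotes the name, so no regex guard is needed
    curs.execute(sql.SQL('SELECT * FROM {}.departement LIMIT 10')
                 .format(sql.Identifier('insee')))
    print(curs.fetchall())
conn.close()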

@ -142,6 +142,72 @@ if __name__ == '__main__':
            PRIMARY KEY (com,id_indicateur,date_debut)
            );""",
            {'schema': AsIs(args.schema_name)})

        curs.execute("""
            CREATE OR REPLACE VIEW %(schema)s.view_indicateur_dep AS
            SELECT id_departement, d.dep, d.ncc, s.id_indicateur, s.date_debut, SUM(s.valeur)
            FROM %(schema)s.departement d
            INNER JOIN %(schema)s.commune c ON c.dep = d.dep
            INNER JOIN %(schema)s.statistique s ON s.com = c.com
            GROUP BY id_departement, d.dep, s.date_debut, d.ncc, s.id_indicateur
            ORDER BY id_indicateur;""",
            {'schema': AsIs(args.schema_name)})

        curs.execute("""
            CREATE OR REPLACE VIEW %(schema)s.view_indicateur_reg AS
            SELECT id_region, r.reg, r.ncc, s.id_indicateur, s.date_debut, SUM(s.valeur)
            FROM %(schema)s.region r
            INNER JOIN %(schema)s.departement d ON d.reg = r.reg
            INNER JOIN %(schema)s.commune c ON c.dep = d.dep
            INNER JOIN %(schema)s.statistique s ON s.com = c.com
            GROUP BY id_region, r.reg, s.date_debut, r.ncc, s.id_indicateur
            ORDER BY id_indicateur;""",
            {'schema': AsIs(args.schema_name)})

        curs.execute("""
            ALTER TABLE %(schema)s.region
            ADD COLUMN IF NOT EXISTS population INT;
            """,
            {'schema': AsIs(args.schema_name)})

        curs.execute("""
            ALTER TABLE %(schema)s.departement
            ADD COLUMN IF NOT EXISTS population INT;
            """,
            {'schema': AsIs(args.schema_name)})

        curs.execute("""
            CREATE OR REPLACE PROCEDURE %(schema)s.PRC_POP_REG_DEP()
            LANGUAGE plpgsql
            AS $$
            DECLARE
                REC RECORD;
            BEGIN

                FOR REC IN (SELECT id_departement, ncc, sum AS valeur
                            FROM %(schema)s.view_indicateur_dep v
                            WHERE id_indicateur = 1
                            AND date_debut = '2018') LOOP

                    UPDATE %(schema)s.departement
                    SET population = REC.valeur
                    WHERE id_departement = REC.id_departement;

                END LOOP;

                FOR REC IN (SELECT reg, SUM(population) AS valeur
                            FROM %(schema)s.departement d
                            GROUP BY reg) LOOP

                    UPDATE %(schema)s.region
                    SET population = REC.valeur
                    WHERE reg = REC.reg;

                END LOOP;
            END;
            $$;""",
            {'schema': AsIs(args.schema_name)})
        conn.commit()
    t.stop()

    t.start('Import data from csv files')
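
Aside, not part of the diff: the two cursor loops in PRC_POP_REG_DEP can also be expressed as set-based updates. A sketch under the same table and view definitions, with the schema hard-coded to 'insee' and a placeholder DSN for illustration:

# Sketch only: set-based equivalent of PRC_POP_REG_DEP's loops.
# Assumes the tables/views created above; 'insee' stands in for
# the configured schema name and the DSN is a placeholder.
import psycopg2

conn = psycopg2.connect('dbname=insee')  # placeholder DSN
with conn.cursor() as curs:
    # First loop: copy the 2018 value of indicator 1 to departement.population
    curs.execute("""
        UPDATE insee.departement d
        SET population = v.sum
        FROM insee.view_indicateur_dep v
        WHERE v.id_departement = d.id_departement
          AND v.id_indicateur = 1 AND v.date_debut = '2018';""")
    # Second loop: region population = sum of its departements
    curs.execute("""
        UPDATE insee.region r
        SET population = s.pop
        FROM (SELECT dep.reg, SUM(dep.population) AS pop
              FROM insee.departement dep
              GROUP BY dep.reg) s
        WHERE r.reg = s.reg;""")
conn.commit()
conn.close()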
@ -179,4 +245,36 @@ if __name__ == '__main__':
        conn.commit()
    t.stop()

    t.start('Call procedure')
    with conn.cursor() as curs:
        curs.execute("CALL %(schema)s.PRC_POP_REG_DEP()",
                     {'schema': AsIs(args.schema_name)}
                     )
    t.stop()

    t.start('Add Modify Trigger')
    with conn.cursor() as curs:
        curs.execute("""
            CREATE OR REPLACE FUNCTION %(schema)s.block_maj_reg_dep()
            RETURNS TRIGGER AS $bloquage$
            BEGIN
                RAISE EXCEPTION
                    'Mise à jour non autorisée';
            END;
            $bloquage$ LANGUAGE plpgsql;

            CREATE TRIGGER TRG_BLOQ_MAJ_REG
            BEFORE INSERT OR UPDATE OR DELETE ON %(schema)s.region
            FOR EACH ROW EXECUTE PROCEDURE %(schema)s.block_maj_reg_dep();

            CREATE TRIGGER TRG_BLOQ_MAJ_DEP
            BEFORE INSERT OR UPDATE OR DELETE ON %(schema)s.departement
            FOR EACH ROW EXECUTE PROCEDURE %(schema)s.block_maj_reg_dep();
            """,
            {'schema': AsIs(args.schema_name)})
        conn.commit()

    t.stop()
    conn.close()
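
Once these triggers are in place, any further DML on region or departement should fail with the exception raised by block_maj_reg_dep. A quick hand check, not part of the diff (placeholder DSN, 'insee' standing in for the configured schema):

# Sketch only: confirm the blocking trigger fires as expected.
import psycopg2

conn = psycopg2.connect('dbname=insee')  # placeholder DSN
try:
    with conn.cursor() as curs:
        curs.execute('UPDATE insee.region SET population = 0;')
    conn.commit()
except psycopg2.Error as e:
    print('Blocked as expected:', e)  # 'Mise à jour non autorisée'
finally:
    conn.close()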

@ -1,90 +0,0 @@
#!/usr/bin/env python
import sys
import os
import re
# import time
import logging
import argparse as arg
import psycopg2 as pg
from psycopg2.extensions import AsIs

from classes.CustomFormater import CustomFormatter
from classes.Timer import Timer

# Schema name is NAMEDATALEN-1 (PGSQL source code)
# -> src/include/pg_config_manual.h
def check_schema_name(arg_value, pat=re.compile(r"^[a-z0-9A-Z]{1,63}$")):
    if not pat.match(arg_value):
        raise ValueError
    return arg_value


def parse_args():
    """
    Parse arguments
    """
    parser = arg.ArgumentParser('Process csv files from INSEE')

    parser.add_argument('--connection-file', '-f',
                        help='PostgreSQL connection file',
                        default='.pgconn'
                        )

    # As we use AsIs to interpolate the schema name without quotes in our
    # SQL queries, we must ensure it contains no SQL special characters
    # (comments --, trailing commas, etc.) to prevent SQL injection.
    parser.add_argument('--schema-name',
                        help='Database schema name',
                        type=check_schema_name,
                        default='insee'
                        )

    debug_group = parser.add_mutually_exclusive_group()
    debug_group.add_argument('--verbose', '-V',
                             help='Verbose output',
                             action='store_true')
    debug_group.add_argument('--debug', '-d',
                             help='Activate debug mode',
                             action='store_true')
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()
    # logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger()
    tty_handler = logging.StreamHandler()

    # create console handler with a higher log level
    tty_handler.setFormatter(CustomFormatter())
    logger.addHandler(tty_handler)

    if args.verbose is True:
        logger.setLevel(logging.INFO)
        logger.info('VERBOSE mode activated')

    if args.debug is True:
        logger.setLevel(logging.DEBUG)
        logger.debug('DEBUG mode activated')

    t = Timer(logger=logger.info)
    logging.debug('Import pgsql connection file {}'.format(args.connection_file))
    with open(args.connection_file) as cf:
        pg_conn = cf.read()

    t.start('Delete schema')
    logger.debug('Delete schema {}'.format(args.schema_name))
    conn = pg.connect(pg_conn)
    with conn.cursor() as curs:
        try:
            curs.execute('DROP SCHEMA IF EXISTS %s CASCADE',
                         (AsIs(args.schema_name),))
            conn.commit()
        except Exception as e:
            logger.error('Can\'t drop schema {}: {}'.format(
                args.schema_name,
                e
                )
            )

    t.stop()
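
End note, not part of the diff: DROP SCHEMA ... CASCADE removes every contained object (tables, views, triggers, the procedure), so the create scripts can be re-run from a clean slate. The script's core action, reduced to a sketch with a placeholder DSN and schema name:

# Sketch only: the drop-schema script's essential call.
import psycopg2
from psycopg2.extensions import AsIs

conn = psycopg2.connect('dbname=insee')  # placeholder DSN
with conn.cursor() as curs:
    curs.execute('DROP SCHEMA IF EXISTS %s CASCADE', (AsIs('insee'),))
conn.commit()
conn.close()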