Day 2, full DB import/update/delete from dir with XML or HTTP

Jack Stdin
2016-01-13 17:38:01 +03:00
parent 3aeb00d82a
commit 67f6943dce
14 changed files with 146 additions and 67 deletions
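
Taken together, the changes below let a caller create the schema once and then stream CSV chunks into PostgreSQL with an operation type per chunk. A rough usage sketch follows; the module path aore.dbutils.dbhandler and the chunk file name are illustrative assumptions, while the method names and signatures come from the diff itself.

# Hypothetical usage of the reworked DbHandler (module path and file names are assumptions).
from aore.aoutils.aoxmltableentry import AoXmlTableEntry
from aore.dbutils.dbhandler import DbHandler  # module path assumed

db = DbHandler()
db.pre_create()  # runs aore/templates/postgre/pre_create.sql

# For each CSV chunk produced from a FIAS XML table, pick the matching operation:
db.bulk_csv(AoXmlTableEntry.OperationType.update, "ADDROBJ", 50001, "/tmp/addrobj_chunk.csv")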

View File

@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
import logging
from traceback import format_exc
import psycopg2
from aore.aoutils.aoxmltableentry import AoXmlTableEntry
from aore.config import db as dbparams
from aore.dbutils.dbimpl import DBImpl
from aore.dbutils.dbschemas import db_shemas
@@ -12,37 +12,73 @@ from aore.dbutils.dbschemas import db_shemas
class DbHandler:
def __init__(self):
logging.basicConfig(format='%(asctime)s %(message)s')
self.db = DBImpl(psycopg2, dbparams)
def bulk_csv(self, chunk_size, table_name, csv_file_name):
sql_query = "COPY \"{}\" ({}) FROM '{}' DELIMITER '\t' NULL 'NULL'". \
format(table_name,
", ".join(
db_shemas[table_name].fields),
csv_file_name)
try:
cur = self.db.get_cursor()
cur.execute(sql_query)
self.db.transaction_commit()
except:
self.db.transaction_rollback()
raise BaseException("Error updating sql. Reason : {}".format(format_exc()))
f = open("aore/templates/postgre/bulk_create.sql")
self.syntax_bulk_create = f.read()
f.close()
logging.warning("Inserted {} queries FROM {}".format(chunk_size, csv_file_name))
f = open("aore/templates/postgre/bulk_update.sql")
self.syntax_bulk_update = f.read()
f.close()
f = open("aore/templates/postgre/bulk_delete.sql")
self.syntax_bulk_delete = f.read()
f.close()
def bulk_csv(self, operation_type, table_name, processed_count, csv_file_name):
sql_query = None
# simply add new records
if operation_type == AoXmlTableEntry.OperationType.create:
sql_query = self.syntax_bulk_create \
.replace("%tab%", "\t") \
.replace("%tablename%", table_name) \
.replace("%fieldslist%", ", ".join(db_shemas[table_name].fields)) \
.replace("%csvname%", csv_file_name)
# update table
if operation_type == AoXmlTableEntry.OperationType.update:
fields_update_list = ""
for field in db_shemas[table_name].fields:
if field != db_shemas[table_name].unique_field.upper():
fields_update_list += "{}=EXCLUDED.{}, ".format(field, field)
fields_update_list = fields_update_list[:-2]
sql_query = self.syntax_bulk_update \
.replace("%tab%", "\t") \
.replace("%tablename%", table_name) \
.replace("%fieldslist%", ", ".join(db_shemas[table_name].fields)) \
.replace("%csvname%", csv_file_name) \
.replace("%uniquekey%", db_shemas[table_name].unique_field) \
.replace("%updaterule%", fields_update_list)
if table_name == "ADDROBJ":
sql_query += "DELETE FROM \"%tablename%\" WHERE %filterrule%;" \
.replace("%tablename%", table_name) \
.replace("%filterrule%",
"ACTSTATUS = FALSE OR NEXTID IS NOT NULL")
# delete records from table
if operation_type == AoXmlTableEntry.OperationType.delete:
sql_query = self.syntax_bulk_delete \
.replace("%tab%", "\t") \
.replace("%tablename%", table_name) \
.replace("%fieldslist%", ", ".join(db_shemas[table_name].fields)) \
.replace("%csvname%", csv_file_name) \
.replace("%uniquekey%", db_shemas[table_name].unique_field)
assert sql_query, "Invalid operation type: {}".format(operation_type)
self.db.execute(sql_query)
logging.info("Processed {} queries FROM {}".format(processed_count-1, csv_file_name))
def pre_create(self):
f = open("aore/templates/postgre/pre_create.sql")
create_db_syntax = f.read()
sql_query = f.read()
f.close()
try:
cur = self.db.get_cursor()
cur.execute(create_db_syntax)
self.db.transaction_commit()
except:
self.db.transaction_rollback()
raise "Error downloading. Reason : {}".format(format_exc())
self.db.execute(sql_query)
def pre_update(self):
# TODO: update actions
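
The reworked bulk_csv leans on three SQL templates under aore/templates/postgre/ and fills the %tab%, %tablename%, %fieldslist%, %csvname%, %uniquekey% and %updaterule% placeholders by plain string replacement. The templates themselves are not part of the hunks shown here; the snippet below is only a plausible sketch of bulk_update.sql, written as a Python constant and assuming a COPY into a temporary table followed by a PostgreSQL 9.5+ upsert, which is what the EXCLUDED.* update rule implies.

# Hypothetical content of aore/templates/postgre/bulk_update.sql -- an assumption,
# not the template actually shipped with the project.
BULK_UPDATE_SKETCH = """
CREATE TEMP TABLE "bulk_%tablename%" AS SELECT %fieldslist% FROM "%tablename%" LIMIT 0;
COPY "bulk_%tablename%" (%fieldslist%) FROM '%csvname%' DELIMITER '%tab%' NULL 'NULL';
INSERT INTO "%tablename%" (%fieldslist%)
SELECT %fieldslist% FROM "bulk_%tablename%"
ON CONFLICT (%uniquekey%) DO UPDATE SET %updaterule%;
DROP TABLE "bulk_%tablename%";
"""

Under the same assumption, bulk_create.sql would reduce to the COPY statement alone, and bulk_delete.sql to a DELETE whose WHERE clause matches %uniquekey% against the keys loaded into the temporary table.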

View File

@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
from traceback import format_exc
class DBImpl:
def __init__(self, engine, params):
@@ -19,6 +21,15 @@ class DBImpl:
def get_cursor(self):
return self.connection.cursor()
def execute(self, sql_query):
try:
cur = self.get_cursor()
cur.execute(sql_query)
self.transaction_commit()
except:
self.transaction_rollback()
raise BaseException("Error execute sql query. Reason : {}".format(format_exc()))
def get_rows(self, query_string, for_dict=True):
if for_dict:
cur = self.connection.cursor(self.db_engine.cursors.DictCursor)
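
With commit and rollback now handled inside execute(), callers no longer need to touch cursors for one-off statements. A minimal usage sketch, assuming DBImpl forwards the parameter dict to psycopg2.connect (the real values live in aore.config.db, which this diff does not show):

import psycopg2
from aore.dbutils.dbimpl import DBImpl

# Hypothetical connection parameters; substitute whatever aore.config.db provides.
dbparams = dict(host="localhost", user="fias", password="fias", database="fias_db")

db = DBImpl(psycopg2, dbparams)
# execute() commits on success and rolls back (then re-raises) on failure.
db.execute('DELETE FROM "ADDROBJ" WHERE ACTSTATUS = FALSE OR NEXTID IS NOT NULL')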

View File

@@ -2,17 +2,20 @@
class DbSchema:
def __init__(self, name, fieldlist, xmltag):
def __init__(self, name, fieldlist, unique_key, xmltag):
self.tablename = name
self.fields = fieldlist
self.unique_field = unique_key
self.xml_tag = xmltag
db_shemas = dict()
db_shemas['ADDROBJ'] = DbSchema("ADDROBJ",
["AOID", "AOGUID", "SHORTNAME", "FORMALNAME", "AOLEVEL", "PARENTGUID", "ACTSTATUS",
"CURRSTATUS"],
"LIVESTATUS", "NEXTID"],
"aoid",
"Object")
db_shemas['SOCRBASE'] = DbSchema("SOCRBASE", ["LEVEL", "SOCRNAME", "SCNAME", "KOD_T_ST"], "AddressObjectType")
db_shemas['SOCRBASE'] = DbSchema("SOCRBASE", ["LEVEL", "SOCRNAME", "SCNAME", "KOD_T_ST"], "kod_t_st",
"AddressObjectType")
allowed_tables = ["ADDROBJ", "SOCRBASE"]
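
The new unique_key is what bulk_csv uses both as the upsert conflict target (%uniquekey%) and to keep the key column out of the %updaterule% assignments. A standalone sketch of that rule construction, mirroring the loop in DbHandler.bulk_csv above (the helper name is illustrative):

from aore.dbutils.dbschemas import db_shemas

def build_update_rule(table_name):
    # Every field except the unique key gets overwritten with the incoming (EXCLUDED) value.
    schema = db_shemas[table_name]
    return ", ".join("{0}=EXCLUDED.{0}".format(f)
                     for f in schema.fields
                     if f != schema.unique_field.upper())

# build_update_rule("SOCRBASE") ->
# "LEVEL=EXCLUDED.LEVEL, SOCRNAME=EXCLUDED.SOCRNAME, SCNAME=EXCLUDED.SCNAME"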