Add indexes and sphinx.conf (conf and query stolen from https://github.com/Yuego/django-fias)

This commit is contained in:
Jack Stdin
2016-01-14 01:36:34 +03:00
parent 67f6943dce
commit 759efc43ee
14 changed files with 216 additions and 57 deletions

View File

@@ -3,6 +3,7 @@
import logging
import psycopg2
from bottle import template
from aore.aoutils.aoxmltableentry import AoXmlTableEntry
from aore.config import db as dbparams
@@ -14,28 +15,13 @@ class DbHandler:
def __init__(self):
self.db = DBImpl(psycopg2, dbparams)
f = open("aore/templates/postgre/bulk_create.sql")
self.syntax_bulk_create = f.read()
f.close()
f = open("aore/templates/postgre/bulk_update.sql")
self.syntax_bulk_update = f.read()
f.close()
f = open("aore/templates/postgre/bulk_delete.sql")
self.syntax_bulk_delete = f.read()
f.close()
def bulk_csv(self, operation_type, table_name, processed_count, csv_file_name):
sql_query = None
# simply add new records
if operation_type == AoXmlTableEntry.OperationType.create:
sql_query = self.syntax_bulk_create \
.replace("%tab%", "\t") \
.replace("%tablename%", table_name) \
.replace("%fieldslist%", ", ".join(db_shemas[table_name].fields)) \
.replace("%csvname%", csv_file_name)
sql_query = template('aore/templates/postgre/bulk_create.sql', delim='\t', tablename=table_name,
fieldslist=", ".join(db_shemas[table_name].fields), csvname=csv_file_name)
# update table
if operation_type == AoXmlTableEntry.OperationType.update:
@@ -45,41 +31,34 @@ class DbHandler:
fields_update_list += "{}=EXCLUDED.{}, ".format(field, field)
fields_update_list = fields_update_list[:-2]
sql_query = self.syntax_bulk_update \
.replace("%tab%", "\t") \
.replace("%tablename%", table_name) \
.replace("%fieldslist%", ", ".join(db_shemas[table_name].fields)) \
.replace("%csvname%", csv_file_name) \
.replace("%uniquekey%", db_shemas[table_name].unique_field) \
.replace("%updaterule%", fields_update_list)
if table_name == "ADDROBJ":
sql_query += "DELETE FROM \"%tablename%\" WHERE %filterrule%;" \
.replace("%tablename%", table_name) \
.replace("%filterrule%",
"ACTSTATUS = FALSE OR NEXTID IS NOT NULL")
sql_query = template('aore/templates/postgre/bulk_update.sql', delim='\t', tablename=table_name,
fieldslist=", ".join(db_shemas[table_name].fields), csvname=csv_file_name,
uniquekey=db_shemas[table_name].unique_field, updaterule=fields_update_list)
# delete records from table
if operation_type == AoXmlTableEntry.OperationType.delete:
sql_query = self.syntax_bulk_delete \
.replace("%tab%", "\t") \
.replace("%tablename%", table_name) \
.replace("%fieldslist%", ", ".join(db_shemas[table_name].fields)) \
.replace("%csvname%", csv_file_name) \
.replace("%uniquekey%", db_shemas[table_name].unique_field)
sql_query = template('aore/templates/postgre/bulk_delete.sql', delim='\t', tablename=table_name,
fieldslist=", ".join(db_shemas[table_name].fields), csvname=csv_file_name,
uniquekey=db_shemas[table_name].unique_field)
assert sql_query, "Invalid operation type: {}".format(operation_type)
self.db.execute(sql_query)
logging.info("Processed {} queries FROM {}".format(processed_count-1, csv_file_name))
logging.info("Processed {} queries FROM {}".format(processed_count - 1, csv_file_name))
def pre_create(self):
f = open("aore/templates/postgre/pre_create.sql")
sql_query = f.read()
f.close()
logging.info("Prepare to create DB structure...")
sql_query = template("aore/templates/postgre/pre_create.sql")
self.db.execute(sql_query)
def post_create(self):
logging.info("Indexing ADDROBJ...")
sql_query = template("aore/templates/postgre/post_create.sql")
self.db.execute(sql_query)
logging.info("Indexing done.")
def pre_update(self):
# TODO: update actions
pass