From 36550482178c2b51374b272fe44299a2812b9da9 Mon Sep 17 00:00:00 2001
From: Haukur Rosinkranz
Date: Tue, 15 Sep 2015 09:53:54 +0200
Subject: [PATCH 01/15] temp fix for OB server

---
 openbazaard.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/openbazaard.py b/openbazaard.py
index f7fccbab..9e55b4d8 100644
--- a/openbazaard.py
+++ b/openbazaard.py
@@ -67,8 +67,8 @@ def on_bootstrap_complete(resp):
         kserver = Server(node, db, dht.constants.KSIZE, dht.constants.ALPHA, storage=PersistentStorage(db.DATABASE))
         kserver.protocol.connect_multiplexer(protocol)
         kserver.bootstrap(
-            kserver.querySeed("162.213.253.147:8080",
-                              "5b56c8daeb3b37c8a9b47be6102fa43b9f069f58dcb57475984041b26c99e389"))\
+            kserver.querySeed("seed.openbazaar.org:8080",
+                              "4b953c89a9e698e0cbff18811f849a4625c5895f6cc6b9c06d95d43f1c00959b"))\
             .addCallback(on_bootstrap_complete)
         kserver.saveStateRegularly(DATA_FOLDER + 'cache.pickle', 10)

From e812d26ce495eec386e02d18c984530250895557 Mon Sep 17 00:00:00 2001
From: Haukur Rosinkranz
Date: Tue, 15 Sep 2015 19:40:59 +0200
Subject: [PATCH 02/15] test form

---
 test.html | 107 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 107 insertions(+)
 create mode 100644 test.html

diff --git a/test.html b/test.html
new file mode 100644
index 00000000..2e4f1bb1
--- /dev/null
+++ b/test.html
@@ -0,0 +1,107 @@
+
+
+

Profile

+
+ Name:
+ Location:
+ Handle:
+ About:
+ Short description:
+ website:
+ email:
+ Avatar
+ Header
+ +
+

+
+
+
+ +
+

+
+ Account Type:
+ Username:
+ Proof URL: + +
+

Contract

+
+ Type:
+ Title:
+ Description:
+ Expiration Date:
+ Price:
+ BTC +
+ USD
+ NSFW
+ Processing Time:
+ Estimated Delivery Domestic:
+ Estimated Delivery International:
+ Category:
+ Condition:
+ SKU:
+ Terms & Conditions:
+ Return Policy:
+ Free Shipping
+ Shipping Currency:
+ BTC +
+ USD
+ Shipping Domestic:
+ Shipping International:
+ Shipping Origin:
+ Ships To:
+ Keyword:
+ Moderator:
+ + + Image
+ +
+

+

Purchase

+
+ Contract ID:
+ Quantity:
+ Name:
+ Address:
+ City:
+ State:
+ Postal Code:
+ Country:
+ Moderator:
+ +
+
+

From 4a69399475266605e11d16e874b093f39d45c7ba Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Wed, 23 Sep 2015 23:29:02 +0200
Subject: [PATCH 03/15] Add base functionality for backup tool python code

---
 backupTool.py | 112 ++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 112 insertions(+)
 create mode 100644 backupTool.py

diff --git a/backupTool.py b/backupTool.py
new file mode 100644
index 00000000..2f2648c1
--- /dev/null
+++ b/backupTool.py
@@ -0,0 +1,112 @@
+import csv
+import db.datastore as db
+import os
+import re
+import shutil
+import sqlite3 as lite
+import tarfile
+import time
+
+TABLES = [
+    ('hashmap', ['hash', 'filepath']),
+    ('profile', ['id', 'serializedUserInfo']),
+    ('listings', ['id', 'serializedListings']),
+    ('keys', ['type', 'privkey', 'pubkey']),
+    ('followers', ['id', 'serializedFollowers']),
+    ('following', ['id', 'serializedFollowing']),
+    ('messages', ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing']),
+    ('notifications', ['guid', 'handle', 'message', 'timestamp', 'avatar_hash']),
+    ('vendors', ['guid', 'ip', 'port', 'signedPubkey']),
+    ('moderators', ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle']),
+    ('purchases', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig']),
+    ('sales', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller']),
+    ('dht', ['keyword', 'id', 'value', 'birthday'])
+]
+
+# TODO: Add all files and directories to back up together with the database
+# Ex: ['file', 'path/to/file2', ...]
+FILES = []
+
+def _getDatabase():
+    Database = db.Database()
+    return Database.DATABASE
+
+def _exportDatabaseToCsv(tablesAndColumns):
+    """Reads the database for all given tables and stores them as CSV files."""
+    dbFile = _getDatabase()
+    result = None
+    with lite.connect(dbFile) as dbConnection:
+        dbConnection.text_factory = str
+        cursor = dbConnection.cursor()
+        for table in tablesAndColumns:
+            table_name = table[0]
+            table_columns = ', '.join(table[1])
+            data = cursor.execute("SELECT {0} FROM {1}".format(table_columns, table_name))
+            fileName = 'table_{0}.csv'.format(table_name)
+            filePath = os.path.join('backup', fileName)
+            with open(filePath, 'wb') as f:
+                writer = csv.writer(f)
+                writer.writerow(table[1])
+                writer.writerows(data)
+    return result
+
+def backup(tablesAndColumns, files, output=None):
+    """Archives given tables and files in a single tar archive."""
+    # Remove existing database files and re-make them
+    if os.path.exists('backup'):
+        shutil.rmtree('backup')
+    os.makedirs('backup')
+    _exportDatabaseToCsv(tablesAndColumns)
+
+    # Archive files
+    if not output:
+        output = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
+    with tarfile.open(output, 'w:gz') as tar:
+        tar.add('backup')
+        for f in files:
+            tar.add(f)
+        tar.close()
+
+def _importCsvToTable(fileName, deleteDataFirst=False):
+    """Imports given CSV file to the database."""
+    tableName = re.search('table_(\w+).csv', fileName).group(1)
+    dbFile = _getDatabase()
+    with lite.connect(dbFile) as dbConnection:
+        dbConnection.text_factory = str
+        cursor = dbConnection.cursor()
+        if deleteDataFirst:
+            cursor.execute('DELETE FROM {0}'.format(tableName))
+        with open(fileName, 'rb') as f:
+            reader = csv.reader(f)
+            header = True
+            for row in reader:
+                if header:
+                    header = False
+                    columns = ', '.join(['?' for column in row])
+                    insertsql = 'INSERT INTO {0} VALUES ({1})'.format(tableName, columns)
+                    rowlen = len(row)
+                else:
+                    if len(row) == rowlen:
+                        print 'Insert into {0}: {1}'.format(tableName, row)
+                        print insertsql
+                        cursor.execute(insertsql, row)
+
+
+def restore(input, deleteTableDataFirst=False):
+    """Restores files and tables of given archive."""
+    # Remove existing database files if any
+    if os.path.exists('backup'):
+        shutil.rmtree('backup')
+
+    # Unarchive files
+    with tarfile.open(input, 'r:gz') as tar:
+        tar.extractall()
+
+    # Restore database files to the database
+    if os.path.exists('backup'):
+        files = ['backup/{0}'.format(f) for f in os.listdir('backup')]
+        for f in files:
+            _importCsvToTable(f, deleteTableDataFirst)
+
+if __name__ == '__main__':
+    print 'Backup tool works as a library.'

From cd97a3952edda72af7e44aa19cbfd53639835bff Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Fri, 25 Sep 2015 10:14:26 +0200
Subject: [PATCH 04/15] Remove print in code

---
 backupTool.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/backupTool.py b/backupTool.py
index 2f2648c1..2bd79078 100644
--- a/backupTool.py
+++ b/backupTool.py
@@ -1,3 +1,4 @@
+__author__ = 'marc'
 import csv
 import db.datastore as db
 import os
@@ -87,8 +88,6 @@ def _importCsvToTable(fileName, deleteDataFirst=False):
                     rowlen = len(row)
                 else:
                     if len(row) == rowlen:
-                        print 'Insert into {0}: {1}'.format(tableName, row)
-                        print insertsql
                         cursor.execute(insertsql, row)
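At this point backupTool is meant to be driven as a library, not from the command line. A minimal sketch of a round trip with the patch-03/04 API, assuming it is run from the OpenBazaar-Server root with a populated database; the extra file list and archive name here are illustrative, not part of the patch:

    # Hypothetical driver for the early backupTool API (Python 2).
    import backupTool

    # Export every known table to CSV and bundle the CSVs plus one extra
    # file into an archive (output=None would use backup_YYYY-MM-DD.tar.gz).
    backupTool.backup(backupTool.TABLES, ['ob.cfg'], output='ob_backup.tar.gz')

    # Unpack the archive and re-insert the CSV rows, wiping each table first.
    backupTool.restore('ob_backup.tar.gz', deleteTableDataFirst=True)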
From ec1caf4ac1dd8399cc952fda5d227749eb83fb7e Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Fri, 25 Sep 2015 11:03:54 +0200
Subject: [PATCH 05/15] Updated backup to use generated OpenBazaar folder

---
 backupTool.py | 169 +++++++++++++++++++++++++------------------------
 1 file changed, 85 insertions(+), 84 deletions(-)

diff --git a/backupTool.py b/backupTool.py
index 2bd79078..450960e3 100644
--- a/backupTool.py
+++ b/backupTool.py
@@ -1,4 +1,5 @@
 __author__ = 'marc'
+from constants import DATA_FOLDER
 import csv
 import db.datastore as db
 import os
@@ -9,103 +10,103 @@ import tarfile
 import time

 TABLES = [
-    ('hashmap', ['hash', 'filepath']),
-    ('profile', ['id', 'serializedUserInfo']),
-    ('listings', ['id', 'serializedListings']),
-    ('keys', ['type', 'privkey', 'pubkey']),
-    ('followers', ['id', 'serializedFollowers']),
-    ('following', ['id', 'serializedFollowing']),
-    ('messages', ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing']),
-    ('notifications', ['guid', 'handle', 'message', 'timestamp', 'avatar_hash']),
-    ('vendors', ['guid', 'ip', 'port', 'signedPubkey']),
-    ('moderators', ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle']),
-    ('purchases', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig']),
-    ('sales', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller']),
-    ('dht', ['keyword', 'id', 'value', 'birthday'])
+    ('hashmap', ['hash', 'filepath']),
+    ('profile', ['id', 'serializedUserInfo']),
+    ('listings', ['id', 'serializedListings']),
+    ('keys', ['type', 'privkey', 'pubkey']),
+    ('followers', ['id', 'serializedFollowers']),
+    ('following', ['id', 'serializedFollowing']),
+    ('messages', ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing']),
+    ('notifications', ['guid', 'handle', 'message', 'timestamp', 'avatar_hash']),
+    ('vendors', ['guid', 'ip', 'port', 'signedPubkey']),
+    ('moderators', ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle']),
+    ('purchases', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig']),
+    ('sales', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller']),
+    ('dht', ['keyword', 'id', 'value', 'birthday'])
 ]

-# TODO: Add all files and directories to back up together with the database
-# Ex: ['file', 'path/to/file2', ...]
-FILES = []
-
 def _getDatabase():
-    Database = db.Database()
-    return Database.DATABASE
+    Database = db.Database()
+    return Database.DATABASE

 def _exportDatabaseToCsv(tablesAndColumns):
-    """Reads the database for all given tables and stores them as CSV files."""
-    dbFile = _getDatabase()
-    result = None
-    with lite.connect(dbFile) as dbConnection:
-        dbConnection.text_factory = str
-        cursor = dbConnection.cursor()
-        for table in tablesAndColumns:
-            table_name = table[0]
-            table_columns = ', '.join(table[1])
-            data = cursor.execute("SELECT {0} FROM {1}".format(table_columns, table_name))
-            fileName = 'table_{0}.csv'.format(table_name)
-            filePath = os.path.join('backup', fileName)
-            with open(filePath, 'wb') as f:
-                writer = csv.writer(f)
-                writer.writerow(table[1])
-                writer.writerows(data)
-    return result
+    """Reads the database for all given tables and stores them as CSV files."""
+    dbFile = _getDatabase()
+    result = None
+    with lite.connect(dbFile) as dbConnection:
+        dbConnection.text_factory = str
+        cursor = dbConnection.cursor()
+        for table in tablesAndColumns:
+            table_name = table[0]
+            table_columns = ', '.join(table[1])
+            data = cursor.execute("SELECT {0} FROM {1}".format(table_columns, table_name))
+            fileName = 'table_{0}.csv'.format(table_name)
+            filePath = os.path.join('backup', fileName)
+            with open(filePath, 'wb') as f:
+                writer = csv.writer(f)
+                writer.writerow(table[1])
+                writer.writerows(data)
+    return result
+
+def backup(tablesAndColumns=None, output=None):
+    """Archives given tables and files in a single tar archive."""
+    os.chdir(DATA_FOLDER)

-def backup(tablesAndColumns, files, output=None):
-    """Archives given tables and files in a single tar archive."""
-    # Remove existing database files and re-make them
-    if os.path.exists('backup'):
-        shutil.rmtree('backup')
-    os.makedirs('backup')
-    _exportDatabaseToCsv(tablesAndColumns)
+    # Remove existing database files and re-make them
+    if os.path.exists('backup'):
+        shutil.rmtree('backup')
+    os.makedirs('backup')
+    _exportDatabaseToCsv(tablesAndColumns)

-    # Archive files
-    if not output:
-        output = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
-    with tarfile.open(output, 'w:gz') as tar:
-        tar.add('backup')
-        for f in files:
-            tar.add(f)
-        tar.close()
+    # Archive files
+    files = os.listdir(DATA_FOLDER)
+    if not output:
+        output = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
+    with tarfile.open(output, 'w:gz') as tar:
+        for f in files:
+            tar.add(f)
+        tar.close()

 def _importCsvToTable(fileName, deleteDataFirst=False):
-    """Imports given CSV file to the database."""
-    tableName = re.search('table_(\w+).csv', fileName).group(1)
-    dbFile = _getDatabase()
-    with lite.connect(dbFile) as dbConnection:
-        dbConnection.text_factory = str
-        cursor = dbConnection.cursor()
-        if deleteDataFirst:
-            cursor.execute('DELETE FROM {0}'.format(tableName))
-        with open(fileName, 'rb') as f:
-            reader = csv.reader(f)
-            header = True
-            for row in reader:
-                if header:
-                    header = False
-                    columns = ', '.join(['?' for column in row])
-                    insertsql = 'INSERT INTO {0} VALUES ({1})'.format(tableName, columns)
-                    rowlen = len(row)
-                else:
-                    if len(row) == rowlen:
-                        cursor.execute(insertsql, row)
+    """Imports given CSV file to the database."""
+    tableName = re.search('table_(\w+).csv', fileName).group(1)
+    dbFile = _getDatabase()
+    with lite.connect(dbFile) as dbConnection:
+        dbConnection.text_factory = str
+        cursor = dbConnection.cursor()
+        if deleteDataFirst:
+            cursor.execute('DELETE FROM {0}'.format(tableName))
+        with open(fileName, 'rb') as f:
+            reader = csv.reader(f)
+            header = True
+            for row in reader:
+                if header:
+                    header = False
+                    columns = ', '.join(['?' for column in row])
+                    insertsql = 'INSERT INTO {0} VALUES ({1})'.format(tableName, columns)
+                    rowlen = len(row)
+                else:
+                    if len(row) == rowlen:
+                        cursor.execute(insertsql, row)

 def restore(input, deleteTableDataFirst=False):
-    """Restores files and tables of given archive."""
-    # Remove existing database files if any
-    if os.path.exists('backup'):
-        shutil.rmtree('backup')
+    """Restores files and tables of given archive."""
+    os.chdir(DATA_FOLDER)
+
+    # Remove existing database files if any
+    if os.path.exists('backup'):
+        shutil.rmtree('backup')

-    # Unarchive files
-    with tarfile.open(input, 'r:gz') as tar:
-        tar.extractall()
+    # Unarchive files
+    with tarfile.open(input, 'r:gz') as tar:
+        tar.extractall()

-    # Restore database files to the database
-    if os.path.exists('backup'):
-        files = ['backup/{0}'.format(f) for f in os.listdir('backup')]
-        for f in files:
-            _importCsvToTable(f, deleteTableDataFirst)
+    # Restore database files to the database
+    if os.path.exists('backup'):
+        files = ['backup/{0}'.format(f) for f in os.listdir('backup')]
+        for f in files:
+            _importCsvToTable(f, deleteTableDataFirst)

 if __name__ == '__main__':
-    print 'Backup tool works as a library.'
+    print 'Backup tool works as a library.'
From 8b01ed3ef7359879601a8c7b42f548e2b778e841 Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Fri, 25 Sep 2015 13:07:32 +0200
Subject: [PATCH 06/15] Add API support for backup tool

---
 api/restapi.py | 13 ++++++++++++
 backupTool.py  | 57 +++++++++++++++++++++++++++++++-------------------
 2 files changed, 49 insertions(+), 21 deletions(-)

diff --git a/api/restapi.py b/api/restapi.py
index 17ca542a..3c3f3f82 100644
--- a/api/restapi.py
+++ b/api/restapi.py
@@ -1,4 +1,5 @@
 __author__ = 'chris'
+import backupTool
 import json
 import os
 from txrestapi.resource import APIResource
@@ -470,3 +471,15 @@ def get_node(node):
                 seller_guid = unhexlify(c.contract["vendor_offer"]["listing"]["id"]["guid"])
                 self.kserver.resolve(seller_guid).addCallback(get_node)
         return server.NOT_DONE_YET
+
+    @POST('^/api/v1/backup')
+    def backup(self, request):
+        tablesAndColumns = request.args["tablesAndColumns"][0]
+        output = request.args["output"][0]
+        return backupTool.backup(tablesAndColumns, output)
+
+    @POST('^/api/v1/restore')
+    def restore(self, request):
+        input = request.args["input"][0]
+        deleteTableDataFirst = request.args["deleteTableDataFirst"][0]
+        return backupTool.restore(input, deleteTableDataFirst)

diff --git a/backupTool.py b/backupTool.py
index 450960e3..5ee6f820 100644
--- a/backupTool.py
+++ b/backupTool.py
@@ -9,21 +9,21 @@ import tarfile
 import time

-TABLES = [
-    ('hashmap', ['hash', 'filepath']),
-    ('profile', ['id', 'serializedUserInfo']),
-    ('listings', ['id', 'serializedListings']),
-    ('keys', ['type', 'privkey', 'pubkey']),
-    ('followers', ['id', 'serializedFollowers']),
-    ('following', ['id', 'serializedFollowing']),
-    ('messages', ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing']),
-    ('notifications', ['guid', 'handle', 'message', 'timestamp', 'avatar_hash']),
-    ('vendors', ['guid', 'ip', 'port', 'signedPubkey']),
-    ('moderators', ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle']),
-    ('purchases', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig']),
-    ('sales', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller']),
-    ('dht', ['keyword', 'id', 'value', 'birthday'])
-]
+TABLES = {
+    'hashmap': ['hash', 'filepath'],
+    'profile': ['id', 'serializedUserInfo'],
+    'listings': ['id', 'serializedListings'],
+    'keys': ['type', 'privkey', 'pubkey'],
+    'followers': ['id', 'serializedFollowers'],
+    'following': ['id', 'serializedFollowing'],
+    'messages': ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing'],
+    'notifications': ['guid', 'handle', 'message', 'timestamp', 'avatar_hash'],
+    'vendors': ['guid', 'ip', 'port', 'signedPubkey'],
+    'moderators': ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle'],
+    'purchases': ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig'],
+    'sales': ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller'],
+    'dht': ['keyword', 'id', 'value', 'birthday']
+}

 def _getDatabase():
     Database = db.Database()
@@ -48,15 +48,25 @@ def _exportDatabaseToCsv(tablesAndColumns):
             writer.writerows(data)
     return result

-def backup(tablesAndColumns=None, output=None):
+def backup(tableList=None, output=None):
     """Archives given tables and files in a single tar archive."""
     os.chdir(DATA_FOLDER)

-    # Remove existing database files and re-make them
-    if os.path.exists('backup'):
-        shutil.rmtree('backup')
-    os.makedirs('backup')
-    _exportDatabaseToCsv(tablesAndColumns)
+    if tableList:
+        # Parse table list
+        tableList = tableList.replace(' ', '').split(',')
+        tablesAndColumns = []
+        for table in tableList:
+            if table in TABLES:
+                tablesAndColumns.append((table, TABLES[table]))
+            else:
+                return 'ERROR, Table not found: {0}'.format(table)
+
+        # Remove existing database files and re-make them
+        if os.path.exists('backup'):
+            shutil.rmtree('backup')
+        os.makedirs('backup')
+        _exportDatabaseToCsv(tablesAndColumns)

     # Archive files
     files = os.listdir(DATA_FOLDER)
@@ -66,6 +76,7 @@ def backup(tableList=None, output=None):
     with tarfile.open(output, 'w:gz') as tar:
         for f in files:
             tar.add(f)
         tar.close()
+    return 'Success'

 def _importCsvToTable(fileName, deleteDataFirst=False):
     """Imports given CSV file to the database."""
@@ -92,6 +103,8 @@ def _importCsvToTable(fileName, deleteDataFirst=False):

 def restore(input, deleteTableDataFirst=False):
     """Restores files and tables of given archive."""
+    if not input:
+        return 'Input path is needed'
     os.chdir(DATA_FOLDER)

     # Remove existing database files if any
@@ -108,5 +121,7 @@ def restore(input, deleteTableDataFirst=False):
         for f in files:
             _importCsvToTable(f, deleteTableDataFirst)

+    return 'Success'
+
 if __name__ == '__main__':
     print 'Backup tool works as a library.'
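The new handlers read their parameters from POST form fields and return backupTool's status strings directly. A sketch of how they might be exercised from Python; the localhost port and the use of the third-party requests package are assumptions, not part of this patch:

    # Hypothetical client for the patch-06 endpoints (Python 2).
    import requests

    api = 'http://localhost:18469/api/v1'  # assumed REST API address

    # Export three tables and archive the whole data folder in one call.
    r = requests.post(api + '/backup',
                      data={'tablesAndColumns': 'hashmap, profile, listings',
                            'output': 'mybackup.tar.gz'})
    print r.text  # 'Success' or an 'ERROR, Table not found: ...' string

    # Unpack the archive and append the CSV rows back into their tables.
    r = requests.post(api + '/restore',
                      data={'input': 'mybackup.tar.gz',
                            'deleteTableDataFirst': ''})
    print r.text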
archive.""" os.chdir(DATA_FOLDER) - # Remove existing database files and re-make them - if os.path.exists('backup'): - shutil.rmtree('backup') - os.makedirs('backup') - _exportDatabaseToCsv(tablesAndColumns) + if tableList: + # Parse table list + tableList = tableList.replace(' ', '').split(',') + tablesAndColumns = [] + for table in tableList: + if table in TABLES: + tablesAndColumns.append((table, TABLES[table])) + else: + return 'ERROR, Table not found: {0}'.format(table) + + # Remove existing database files and re-make them + if os.path.exists('backup'): + shutil.rmtree('backup') + os.makedirs('backup') + _exportDatabaseToCsv(tablesAndColumns) # Archive files files = os.listdir(DATA_FOLDER) @@ -66,6 +76,7 @@ def backup(tablesAndColumns=None, output=None): for f in files: tar.add(f) tar.close() + return 'Success' def _importCsvToTable(fileName, deleteDataFirst=False): """Imports given CSV file to the database.""" @@ -92,6 +103,8 @@ def _importCsvToTable(fileName, deleteDataFirst=False): def restore(input, deleteTableDataFirst=False): """Restores files and tables of given archive.""" + if not input: + return 'Input path is needed' os.chdir(DATA_FOLDER) # Remove existing database files if any @@ -108,5 +121,7 @@ def restore(input, deleteTableDataFirst=False): for f in files: _importCsvToTable(f, deleteTableDataFirst) + return 'Success' + if __name__ == '__main__': print 'Backup tool works as a library.' From 1e8a6ec85d4fe32a5b44ff8e523a59c88b395f58 Mon Sep 17 00:00:00 2001 From: Marc Jamot Date: Fri, 25 Sep 2015 13:17:37 +0200 Subject: [PATCH 07/15] Add backup test html --- backup.html | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 backup.html diff --git a/backup.html b/backup.html new file mode 100644 index 00000000..446b0691 --- /dev/null +++ b/backup.html @@ -0,0 +1,35 @@ + + +Underscored fields are required
+Example data is provided below each input
+
+Backup
+
+
+List of tables to backup (comma separated):
+
+hashmap, profile, listings
+
+Name of file to create (don't forget to add .tar.gz), leave empty for today's date:
+
+myveryspecialbackup.tar.gz
+
+ +
+
+
+Restore
+
+
+Input file:
+
+myveryspecialbackup.tar.gz
+
+Boolean, if data should be removed in the table before input:
+
+True for delete, False or empty for append
+
+
+
+
+
\ No newline at end of file

From d18ca0aa9944960f412794e9620f56391e9eba05 Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Tue, 29 Sep 2015 11:12:32 +0200
Subject: [PATCH 08/15] Separate archive from database export

---
 api/restapi.py | 26 ++++++++++++------
 backup.html    | 57 ++++++++++++++++++++++++----------------
 backupTool.py  | 71 +++++++++++++++++++++++++++++++-------------------
 3 files changed, 96 insertions(+), 58 deletions(-)

diff --git a/api/restapi.py b/api/restapi.py
index 3c3f3f82..5425b96a 100644
--- a/api/restapi.py
+++ b/api/restapi.py
@@ -472,14 +472,24 @@ def get_node(node):
         self.kserver.resolve(seller_guid).addCallback(get_node)
         return server.NOT_DONE_YET

-    @POST('^/api/v1/backup')
-    def backup(self, request):
-        tablesAndColumns = request.args["tablesAndColumns"][0]
+    @POST('^/api/v1/backup_files')
+    def backup_files(self, request):
         output = request.args["output"][0]
-        return backupTool.backup(tablesAndColumns, output)
+        return backupTool.backupFiles(output)

-    @POST('^/api/v1/restore')
-    def restore(self, request):
+    @POST('^/api/v1/export_database')
+    def export_database(self, request):
+        tables_and_columns = request.args["tables_and_columns"][0]
+        remove_previous = request.args["remove_previous"][0]
+        return backupTool.exportDatabase(tables_and_columns, remove_previous)
+
+    @POST('^/api/v1/restore_files')
+    def restore_files(self, request):
         input = request.args["input"][0]
-        deleteTableDataFirst = request.args["deleteTableDataFirst"][0]
-        return backupTool.restore(input, deleteTableDataFirst)
+        remove_previous_database_files = request.args["remove_previous_database_files"][0]
+        return backupTool.restoreFiles(input, remove_previous_database_files)
+
+    @POST('^/api/v1/import_database')
+    def import_database(self, request):
+        remove_previous = request.args["remove_previous"][0]
+        return backupTool.importDatabase(remove_previous)

diff --git a/backup.html b/backup.html
index 446b0691..2904652a 100644
--- a/backup.html
+++ b/backup.html
@@ -3,33 +3,44 @@
 Underscored fields are required
Example data is provided below each input

-Backup
-
- -List of tables to backup (comma separated):
-
-hashmap, profile, listings
-
+ +Backup files

+ Name of file to create (don't forget to add .tar.gz), leave empty for today's date:
-
-myveryspecialbackup.tar.gz
-
- - -
-
-Restore
-
-
+
+myveryspecialbackup.tar.gz

+ +


+ +Export database

+
+List of tables to backup (comma separated):
+
+hashmap, profile, listings

+If previous table files should be removed before export:
+
+True / False (default)

+ +


+ +Restore files

+
Input file:

-myveryspecialbackup.tar.gz
-
-Boolean, if data should be removed in the table before input:
-
-True for delete, False or empty for append
-
+myveryspecialbackup.tar.gz

+If data should be removed in the table before input:
+
+True / False (default)

+


+ +Import database

+
+If previous table files should be removed before import:
+
+True / False (default)

+ +


\ No newline at end of file

diff --git a/backupTool.py b/backupTool.py
index 5ee6f820..39914a45 100644
--- a/backupTool.py
+++ b/backupTool.py
@@ -2,6 +2,7 @@
 from constants import DATA_FOLDER
 import csv
 import db.datastore as db
+import errno
 import os
 import re
 import shutil
@@ -9,7 +10,7 @@ import tarfile
 import time

-TABLES = {
+_TABLES = {
     'hashmap': ['hash', 'filepath'],
     'profile': ['id', 'serializedUserInfo'],
     'listings': ['id', 'serializedListings'],
     'keys': ['type', 'privkey', 'pubkey'],
     'followers': ['id', 'serializedFollowers'],
     'following': ['id', 'serializedFollowing'],
     'messages': ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing'],
     'notifications': ['guid', 'handle', 'message', 'timestamp', 'avatar_hash'],
     'vendors': ['guid', 'ip', 'port', 'signedPubkey'],
     'moderators': ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle'],
     'purchases': ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig'],
     'sales': ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller'],
     'dht': ['keyword', 'id', 'value', 'birthday']
 }

 def _getDatabase():
+    """Retrieves the OpenBazaar database file."""
     Database = db.Database()
     return Database.DATABASE

+def silentRemove(filename):
+    """Silently removes a file if it exists."""
+    try:
+        os.remove(filename)
+    except OSError as e:
+        if e.errno != errno.ENOENT:  # ENOENT: no such file or directory
+            raise
+
 def _exportDatabaseToCsv(tablesAndColumns):
     """Reads the database for all given tables and stores them as CSV files."""
     dbFile = _getDatabase()
     result = None
     with lite.connect(dbFile) as dbConnection:
         ...
             writer.writerows(data)
     return result

-def backup(tableList=None, output=None):
+def backupFiles(output=None):
-    """Archives given tables and files in a single tar archive."""
+    """Archives OpenBazaar files in a single tar archive."""
     os.chdir(DATA_FOLDER)

-    if tableList:
-        # Parse table list
-        tableList = tableList.replace(' ', '').split(',')
-        tablesAndColumns = []
-        for table in tableList:
-            if table in TABLES:
-                tablesAndColumns.append((table, TABLES[table]))
-            else:
-                return 'ERROR, Table not found: {0}'.format(table)
-
-        # Remove existing database files and re-make them
-        if os.path.exists('backup'):
-            shutil.rmtree('backup')
-        os.makedirs('backup')
-        _exportDatabaseToCsv(tablesAndColumns)
-
     # Archive files
     files = os.listdir(DATA_FOLDER)
     if not output:
         output = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
+    silentRemove(output)
     with tarfile.open(output, 'w:gz') as tar:
         for f in files:
             tar.add(f)
         tar.close()
     return 'Success'

+def exportDatabase(tableList, removePrevious=False):
+    """Exports given tables to the OpenBazaar folder."""
+    # Parse table list
+    tableList = tableList.replace(' ', '').split(',')
+    tablesAndColumns = []
+    for table in tableList:
+        if table in _TABLES:
+            tablesAndColumns.append((table, _TABLES[table]))
+        else:
+            return 'ERROR, Table not found: {0}'.format(table)
+
+    # Remove existing database files and re-make them
+    if removePrevious and os.path.exists('backup'):
+        shutil.rmtree('backup')
+    if not os.path.exists('backup'):
+        os.makedirs('backup')
+    _exportDatabaseToCsv(tablesAndColumns)
+    return 'Success'
+
 def _importCsvToTable(fileName, deleteDataFirst=False):
     """Imports given CSV file to the database."""
     ...

-def restore(input, deleteTableDataFirst=False):
-    """Restores files and tables of given archive."""
+def restoreFiles(input, removePreviousDatabaseFiles=False):
+    """Restores files of given archive to OpenBazaar folder."""
     if not input:
         return 'Input path is needed'
     os.chdir(DATA_FOLDER)

     # Remove existing database files if any
-    if os.path.exists('backup'):
+    if removePreviousDatabaseFiles and os.path.exists('backup'):
         shutil.rmtree('backup')

     # Unarchive files
     with tarfile.open(input, 'r:gz') as tar:
         tar.extractall()

     return 'Success'
+
+if __name__ == '__main__':
+    print 'Backup tool works as a library.'
+
+def importDatabase(deletePreviousData=False):
+    """Imports table files from the OpenBazaar folder."""
     # Restore database files to the database
     if os.path.exists('backup'):
         files = ['backup/{0}'.format(f) for f in os.listdir('backup')]
         for f in files:
-            _importCsvToTable(f, deleteTableDataFirst)
+            _importCsvToTable(f, deletePreviousData)
     return 'Success'
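With the archive and database steps separated, a full round trip now takes four calls. A sketch using the module directly (the argument values are examples; each function returns a status string rather than raising):

    # Hypothetical four-step round trip with the patch-08 backupTool API.
    import backupTool

    # 1. Dump selected tables to CSV files under DATA_FOLDER/backup.
    print backupTool.exportDatabase('hashmap, profile, listings', removePrevious=True)
    # 2. Archive the whole data folder, fresh CSV exports included.
    print backupTool.backupFiles('mybackup.tar.gz')
    # 3. Unpack an archive back over the data folder.
    print backupTool.restoreFiles('mybackup.tar.gz')
    # 4. Re-insert the unpacked CSV rows into their tables.
    print backupTool.importDatabase(deletePreviousData=True)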

From 1e2daefaccc5749444f5bd936a154455a199e224 Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Fri, 13 Nov 2015 14:46:38 +0100
Subject: [PATCH 14/15] Add test for backup tool

---
 db/tests/test_backuptool.py | 47 +++++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 db/tests/test_backuptool.py

diff --git a/db/tests/test_backuptool.py b/db/tests/test_backuptool.py
new file mode 100644
index 00000000..818b2035
--- /dev/null
+++ b/db/tests/test_backuptool.py
@@ -0,0 +1,47 @@
+"""Test for db/backuptool"""
+from constants import DATA_FOLDER
+import os
+import unittest
+
+import db.backuptool as bt
+
+TEST_FOLDER = 'test'
+TEST_TAR = 'test.tar.gz'
+TEST_FILE_1 = 'test.txt'
+TEST_FILE_2 = TEST_FOLDER + os.sep + 'test2.txt'
+
+class BackuptoolTest(unittest.TestCase):
+    """Test class for backuptool functions"""
+    def setUp(self):
+        os.chdir(DATA_FOLDER)
+        # Create test folder
+        if not os.path.exists(TEST_FOLDER):
+            os.makedirs(TEST_FOLDER)
+        # Create test files "test.txt", "test/test2.txt"
+        fil = open(TEST_FILE_1, 'w')
+        fil.close()
+        fil = open(TEST_FILE_2, 'w')
+        fil.close()
+        # Backup to "test.tar.gz"
+        if os.path.exists(bt.BACKUP_FOLDER + os.sep + TEST_TAR):
+            os.remove(bt.BACKUP_FOLDER + os.sep + TEST_TAR)
+        bt.backupfiles(TEST_TAR)
+        # Remove test files and directory
+        os.remove(TEST_FILE_1)
+        os.remove(TEST_FILE_2)
+        # Restore from "test.tar.gz"
+        bt.restorefiles(TEST_TAR)
+
+    def tearDown(self):
+        os.remove(TEST_FILE_1)
+        os.remove(TEST_FILE_2)
+        os.remove(bt.BACKUP_FOLDER + os.sep + TEST_TAR)
+
+    def test_backupexists(self):
+        """Checks if the backup file exists"""
+        self.assertTrue(os.path.isfile(bt.BACKUP_FOLDER + os.sep + TEST_TAR))
+
+    def test_restoreexists(self):
+        """Checks if the restored files exist"""
+        self.assertTrue(os.path.isfile(TEST_FILE_1))
+        self.assertTrue(os.path.isfile(TEST_FILE_2))

From e7a52467a0c5f2e22182be5328bc509c8c33460e Mon Sep 17 00:00:00 2001
From: Marc Jamot
Date: Fri, 13 Nov 2015 15:37:27 +0100
Subject: [PATCH 15/15] Rebase master

---
 api/restapi.py   |  12 +++++
 backup.html      |  35 ---------------
 backupTool.py    | 112 -----------------------------------------------
 db/backuptool.py |  62 ++++++++++++++++++++++++++
 test.html        | 107 --------------------------------------------
 5 files changed, 74 insertions(+), 254 deletions(-)
 delete mode 100644 backup.html
 delete mode 100644 backupTool.py
 create mode 100644 db/backuptool.py
 delete mode 100644 test.html

diff --git a/api/restapi.py b/api/restapi.py
index 99a5a1ba..43e05a91 100644
--- a/api/restapi.py
+++ b/api/restapi.py
@@ -20,6 +20,7 @@ from market.profile import Profile
 from market.contracts import Contract
 from net.upnp import PortMapper
+import db.backuptool

 DEFAULT_RECORDS_COUNT = 20
 DEFAULT_RECORDS_OFFSET = 0
@@ -795,5 +796,16 @@ def get_response(num):
             request.finish()
         return server.NOT_DONE_YET

+    @POST('^/api/v1/backup_files')
+    def backup_files(self, request):
+        """Archives OpenBazaar files in a single tar archive."""
+        output_name = request.args["output_name"][0]
+        return db.backuptool.backupfiles(output_name)
+
+    @POST('^/api/v1/restore_files')
+    def restore_files(self, request):
+        """Restores files of given archive to OpenBazaar folder."""
+        input_file = request.args["input_file"][0]
+        return db.backuptool.restorefiles(input_file)

diff --git a/backup.html b/backup.html
deleted file mode 100644
index 446b0691..00000000
--- a/backup.html
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-Underscored fields are required
-Example data is provided below each input
-
-Backup
-
- -List of tables to backup (comma separated):
-
-hashmap, profile, listings
-
-Name of file to create (don't forget to add .tar.gz), leave empty for today's date:
-
-myveryspecialbackup.tar.gz
-
- - -
-
-Restore
-
-
-Input file:
-
-myveryspecialbackup.tar.gz
-
-Boolean, if data should be removed in the table before input:
-
-True for delete, False or empty for append
-
-
-
-
-
\ No newline at end of file

diff --git a/backupTool.py b/backupTool.py
deleted file mode 100644
index 450960e3..00000000
--- a/backupTool.py
+++ /dev/null
@@ -1,112 +0,0 @@
-__author__ = 'marc'
-from constants import DATA_FOLDER
-import csv
-import db.datastore as db
-import os
-import re
-import shutil
-import sqlite3 as lite
-import tarfile
-import time
-
-TABLES = [
-    ('hashmap', ['hash', 'filepath']),
-    ('profile', ['id', 'serializedUserInfo']),
-    ('listings', ['id', 'serializedListings']),
-    ('keys', ['type', 'privkey', 'pubkey']),
-    ('followers', ['id', 'serializedFollowers']),
-    ('following', ['id', 'serializedFollowing']),
-    ('messages', ['guid', 'handle', 'signed_pubkey', 'encryption_pubkey', 'subject', 'message_type', 'message', 'timestamp', 'avatar_hash', 'signature', 'outgoing']),
-    ('notifications', ['guid', 'handle', 'message', 'timestamp', 'avatar_hash']),
-    ('vendors', ['guid', 'ip', 'port', 'signedPubkey']),
-    ('moderators', ['guid', 'signedPubkey', 'encryptionKey', 'encryptionSignature', 'bitcoinKey', 'bitcoinSignature', 'handle']),
-    ('purchases', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller', 'proofSig']),
-    ('sales', ['id', 'title', 'timestamp', 'btc', 'address', 'status', 'thumbnail', 'seller']),
-    ('dht', ['keyword', 'id', 'value', 'birthday'])
-]
-
-def _getDatabase():
-    Database = db.Database()
-    return Database.DATABASE
-
-def _exportDatabaseToCsv(tablesAndColumns):
-    """Reads the database for all given tables and stores them as CSV files."""
-    dbFile = _getDatabase()
-    result = None
-    with lite.connect(dbFile) as dbConnection:
-        dbConnection.text_factory = str
-        cursor = dbConnection.cursor()
-        for table in tablesAndColumns:
-            table_name = table[0]
-            table_columns = ', '.join(table[1])
-            data = cursor.execute("SELECT {0} FROM {1}".format(table_columns, table_name))
-            fileName = 'table_{0}.csv'.format(table_name)
-            filePath = os.path.join('backup', fileName)
-            with open(filePath, 'wb') as f:
-                writer = csv.writer(f)
-                writer.writerow(table[1])
-                writer.writerows(data)
-    return result
-
-def backup(tablesAndColumns=None, output=None):
-    """Archives given tables and files in a single tar archive."""
-    os.chdir(DATA_FOLDER)
-
-    # Remove existing database files and re-make them
-    if os.path.exists('backup'):
-        shutil.rmtree('backup')
-    os.makedirs('backup')
-    _exportDatabaseToCsv(tablesAndColumns)
-
-    # Archive files
-    files = os.listdir(DATA_FOLDER)
-    if not output:
-        output = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
-    with tarfile.open(output, 'w:gz') as tar:
-        for f in files:
-            tar.add(f)
-        tar.close()
-
-def _importCsvToTable(fileName, deleteDataFirst=False):
-    """Imports given CSV file to the database."""
-    tableName = re.search('table_(\w+).csv', fileName).group(1)
-    dbFile = _getDatabase()
-    with lite.connect(dbFile) as dbConnection:
-        dbConnection.text_factory = str
-        cursor = dbConnection.cursor()
-        if deleteDataFirst:
-            cursor.execute('DELETE FROM {0}'.format(tableName))
-        with open(fileName, 'rb') as f:
-            reader = csv.reader(f)
-            header = True
-            for row in reader:
-                if header:
-                    header = False
-                    columns = ', '.join(['?' for column in row])
-                    insertsql = 'INSERT INTO {0} VALUES ({1})'.format(tableName, columns)
-                    rowlen = len(row)
-                else:
-                    if len(row) == rowlen:
-                        cursor.execute(insertsql, row)
-
-def restore(input, deleteTableDataFirst=False):
-    """Restores files and tables of given archive."""
-    os.chdir(DATA_FOLDER)
-
-    # Remove existing database files if any
-    if os.path.exists('backup'):
-        shutil.rmtree('backup')
-
-    # Unarchive files
-    with tarfile.open(input, 'r:gz') as tar:
-        tar.extractall()
-
-    # Restore database files to the database
-    if os.path.exists('backup'):
-        files = ['backup/{0}'.format(f) for f in os.listdir('backup')]
-        for f in files:
-            _importCsvToTable(f, deleteTableDataFirst)
-
-if __name__ == '__main__':
-    print 'Backup tool works as a library.'

diff --git a/db/backuptool.py b/db/backuptool.py
new file mode 100644
index 00000000..bc9a0622
--- /dev/null
+++ b/db/backuptool.py
@@ -0,0 +1,62 @@
+"""Import and export data for the OpenBazaar server."""
+__author__ = 'marc'
+from constants import DATA_FOLDER
+import db.datastore as db
+import os
+import sqlite3 as lite
+import tarfile
+import time
+
+BACKUP_FOLDER = 'backup'
+
+def _getdatabase():
+    """Retrieves the OpenBazaar database file."""
+    database = db.Database()
+    return database.DATABASE
+
+def backupfiles(output_name=None):
+    """Archives OpenBazaar files in a single tar archive."""
+    os.chdir(DATA_FOLDER)
+    if not os.path.exists(BACKUP_FOLDER):
+        os.makedirs(BACKUP_FOLDER)
+    if not output_name:
+        output_name = 'backup_{0}.tar.gz'.format(time.strftime('%Y-%m-%d'))
+    if not os.path.isabs(output_name):
+        output = BACKUP_FOLDER + os.sep + output_name
+    else:
+        # Absolute paths are used as given; without this branch an absolute
+        # output_name would leave 'output' unassigned and raise NameError.
+        output = output_name
+    if os.path.isfile(output):
+        raise IOError(output + ' already exists.')
+
+    # Lock the database
+    db_file = _getdatabase()
+    db_connection = lite.connect(db_file)
+    db_connection.commit()
+
+    # Archive files
+    files = os.listdir(DATA_FOLDER)
+    with tarfile.open(output, 'w:gz') as tar:
+        for fil in files:
+            if fil != BACKUP_FOLDER:
+                tar.add(fil)
+        tar.close()
+
+    # Unlock database
+    db_connection.rollback()
+    db_connection.close()
+
+    return True
+
+
+def restorefiles(input_file):
+    """Restores files of given archive to OpenBazaar folder."""
+    os.chdir(DATA_FOLDER)
+    if not os.path.isabs(input_file):
+        input_file = BACKUP_FOLDER + os.sep + input_file
+
+    if not os.path.isfile(input_file):
+        raise IOError(input_file + ' does not exist.')
+
+    # Unarchive files
+    with tarfile.open(input_file, 'r:gz') as tar:
+        tar.extractall()
+
+    return True

diff --git a/test.html b/test.html
deleted file mode 100644
index 2e4f1bb1..00000000
--- a/test.html
+++ /dev/null
@@ -1,107 +0,0 @@
-
-
-

Profile

- - Name:
- Location:
- Handle:
- About:
- Short description:
- website:
- email:
- Avatar
- Header
- -
-

-
-
-
- -
-

-
- Account Type:
- Username:
- Proof URL: - -
-

Contract

-
- Type:
- Title:
- Description:
- Expiration Date:
- Price:
- BTC -
- USD
- NSFW
- Processing Time:
- Estimated Delivery Domestic:
- Estimated Delivery International:
- Category:
- Condition:
- SKU:
- Terms & Conditions:
- Return Policy:
- Free Shipping
- Shipping Currency:
- BTC -
- USD
- Shipping Domestic:
- Shipping International:
- Shipping Origin:
- Ships To:
- Keyword:
- Moderator:
- - - Image
- -
-

-

Purchase

-
- Contract ID:
- Quantity:
- Name:
- Address:
- City:
- State:
- Postal Code:
- Country:
- Moderator:
- -
- -
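
The final tool keeps only whole-folder archiving, raises IOError on bad paths instead of returning status strings, and lives at db/backuptool.py. A closing sketch of both ways to drive it; the REST port is an assumption and requests is a third-party package, neither is part of the patch:

    # Library use (Python 2): archive and restore the OpenBazaar data folder.
    import db.backuptool as bt

    bt.backupfiles('snapshot.tar.gz')   # creates DATA_FOLDER/backup/snapshot.tar.gz
    bt.restorefiles('snapshot.tar.gz')  # extracts it back over DATA_FOLDER

    # The same operations through the final REST endpoints.
    import requests
    requests.post('http://localhost:18469/api/v1/backup_files',  # assumed port
                  data={'output_name': 'snapshot.tar.gz'})
    requests.post('http://localhost:18469/api/v1/restore_files',
                  data={'input_file': 'snapshot.tar.gz'})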