Initial upload of bennys production files
BIN  instance/blog.db  Normal file
Binary file not shown.
6  instance/email_list_export.csv  Normal file
@@ -0,0 +1,6 @@
id,email,first_name,last_name,confirmed,confirmation_token
1,johnson126762@gmail.com,Makayla,Johnson,1,
2,me@sampacker.com,Sam,Packer,1,
3,aaronheisler2@gmail.com,Aaron,Heisler,1,
4,uampaoqy@dont-reply.me,MyName,Alice,0,QhnYODFBfOBSr8OdQfhVFQ
5,sydlh12@gmail.com,Sydney,Hardin ,1,
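A hedged sketch of reading this export back with pandas (the column names come from the header row above; the file path and dtype choice are assumptions, not part of this commit):

import pandas as pd

# Load the subscriber export; keep confirmation_token textual so empty tokens
# are not coerced to floats.
subs = pd.read_csv("instance/email_list_export.csv", dtype={"confirmation_token": "string"})

# Rows with confirmed == 1 appear to have confirmed their address; unconfirmed
# rows (confirmed == 0) still carry a confirmation_token.
confirmed = subs[subs["confirmed"] == 1]
print(confirmed[["email", "first_name", "last_name"]])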
30  instance/export.py  Normal file
@@ -0,0 +1,30 @@
import sqlite3
import pandas as pd
import os

def export_email_list_to_csv(db_path: str, output_path: str = "email_list_export.csv"):
    if not os.path.exists(db_path):
        raise FileNotFoundError(f"Database not found: {db_path}")

    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    try:
        cursor.execute("SELECT * FROM email_list")
        rows = cursor.fetchall()
        data = [dict(row) for row in rows]
        if not data:
            print("No data found in email_list table.")
            return

        df = pd.DataFrame(data)
        df.to_csv(output_path, index=False)
        print(f"✅ Exported {len(df)} rows to {output_path}")

    finally:
        conn.close()

if __name__ == "__main__":
    export_email_list_to_csv("/var/www/bennysblog/instance/blog.db")
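A minimal usage sketch for export.py, assuming pandas is installed on the host; the alternate output path below is illustrative, not taken from this commit:

from export import export_email_list_to_csv

# Same call the __main__ block makes, but writing the CSV somewhere else.
export_email_list_to_csv("/var/www/bennysblog/instance/blog.db",
                         output_path="/tmp/email_list_export.csv")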
80  instance/migrate.py  Normal file
@@ -0,0 +1,80 @@
import sqlite3
import os
import argparse

def migrate_sqlite_common_columns(source_db_path: str, dest_db_path: str):
    if not os.path.exists(source_db_path):
        raise FileNotFoundError(f"Source DB not found: {source_db_path}")

    source_conn = sqlite3.connect(source_db_path)
    dest_conn = sqlite3.connect(dest_db_path)

    source_conn.row_factory = sqlite3.Row

    try:
        source_cursor = source_conn.cursor()
        dest_cursor = dest_conn.cursor()

        source_cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
        tables = [row[0] for row in source_cursor.fetchall() if not row[0].startswith('sqlite_')]

        for table in tables:
            source_cursor.execute(f"PRAGMA table_info({table})")
            source_cols_info = source_cursor.fetchall()
            source_cols = [row[1] for row in source_cols_info]

            dest_cursor.execute(f"PRAGMA table_info({table})")
            dest_cols_info = dest_cursor.fetchall()
            dest_cols = [row[1] for row in dest_cols_info]

            common_cols = [col for col in source_cols if col in dest_cols]
            if not common_cols:
                print(f"⚠️ Skipping table '{table}' — no common columns")
                continue

            required_not_null_cols = [row[1] for row in dest_cols_info if row[3] == 1]
            missing_required = [col for col in required_not_null_cols if col not in common_cols]

            if missing_required:
                print(f"⛔ Skipping table '{table}' — missing required NOT NULL columns in source: {missing_required}")
                continue

            print(f"\n➡️ Table: {table}")
            print(f" Common columns: {common_cols}")
            print(f" Required NOT NULL columns: {required_not_null_cols}")

            col_str = ", ".join(common_cols)
            placeholder_str = ", ".join("?" for _ in common_cols)

            source_cursor.execute(f"SELECT {col_str} FROM {table}")
            rows = source_cursor.fetchall()

            filtered_rows = []
            skipped_count = 0
            for row in rows:
                if all(row[col] is not None for col in required_not_null_cols if col in common_cols):
                    filtered_rows.append(tuple(row[col] for col in common_cols))
                else:
                    skipped_count += 1

            print(f" ✅ Inserted: {len(filtered_rows)} ⛔ Skipped: {skipped_count}")

            if filtered_rows:
                insert_sql = f"INSERT INTO {table} ({col_str}) VALUES ({placeholder_str})"
                dest_cursor.executemany(insert_sql, filtered_rows)

        dest_conn.commit()
        print("\n✅ Partial migration of common columns completed (incomplete rows skipped).")

    finally:
        source_conn.close()
        dest_conn.close()

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Migrate data between two SQLite databases by matching column names.")
    parser.add_argument("source_db", help="Path to the source SQLite database")
    parser.add_argument("dest_db", help="Path to the destination SQLite database")
    args = parser.parse_args()

    migrate_sqlite_common_columns(args.source_db, args.dest_db)
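A hedged invocation sketch for migrate.py; the database filenames below are placeholders rather than paths from this commit:

# CLI form (run from the instance directory):
#   python3 migrate.py old_blog.db blog.db
from migrate import migrate_sqlite_common_columns

# Copies rows table by table for the column names the two schemas share,
# skipping tables with no overlap and rows that would leave a destination
# NOT NULL column empty.
migrate_sqlite_common_columns("old_blog.db", "blog.db")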
BIN  instance/posts.db  Normal file
Binary file not shown.