"""Tornado blog backend handlers (reconstructed).

NOTE(review): this file was a raw copy/paste of a GitHub diff *view*
("Loading…/@@…@@" residue included), not compilable source.  Below is a
cleaned reconstruction of every code region that was actually visible in
the paste.  Regions the diff view had collapsed are marked with
"collapsed in diff" comments and MUST be restored from the real
repository (e.g. the psycopg2/markdown imports, NoResultError, the query
helpers on BaseHandler, the schema-creation code, the full Application
class) before this file is used.

Concrete fixes applied during reconstruction:

- UpdateBlogEntriesHandler.post: the UPDATE statement's arguments were
  passed in the wrong order (``slug`` bound to the ``updated`` column and
  the timestamp bound to ``WHERE slug``), so updates silently matched no
  rows.  Reordered to match the placeholders.
- UpdateBlogEntriesHandler.post: now requires authentication, matching
  CreateBlogEntryHandler (it dereferences ``self.current_user``).
- EntryHandler.get: the missing-entry check now runs *before*
  ``entry.published`` is dereferenced.
- BaseHandler.date_serialization: plain ``date`` objects have no
  ``tzinfo`` attribute; the tz check now applies to ``datetime`` only.
- The aware->naive ``published`` normalisation loop, copy-pasted into four
  handlers, is extracted into ``_as_naive_utc``.
- UTC handling uses the stdlib ``datetime.timezone.utc`` (equivalent to
  the previous ``pytz.UTC`` usage, which was UTC-only).
"""

import json
import logging
import unicodedata
from datetime import date, datetime, timezone

import pytz  # retained for any callers elsewhere; new code uses timezone.utc
import tornado.locks
import tornado.web
from tornado.options import define, options

# collapsed in diff: the remaining imports (psycopg2, markdown, tornado
# helpers, define() calls, NoResultError) — restore from the repository.

# Configure logging once at import time.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)


async def maybe_create_tables(db):
    """Probe the ``entries`` table, pinning the DB session to UTC first."""
    try:
        with await db.cursor() as cur:
            # Keep every timestamp the session produces in UTC so the
            # naive-datetime normalisation in the handlers stays coherent.
            await cur.execute("SET TIME ZONE 'UTC';")
            await cur.execute("SELECT COUNT(*) FROM entries LIMIT 1")
            await cur.fetchone()
    except psycopg2.ProgrammingError:
        # collapsed in diff: the real file creates the schema here.
        # Restore it from the repository; re-raising keeps the failure
        # loud until that code is restored.
        raise


# collapsed in diff: class Application(tornado.web.Application) — only its
# route table was visible.  The pasted diff adds ONE route to it:
#     (r"/tornado-backend/update-blog-entry/([^/]+)", UpdateBlogEntriesHandler),
# alongside the existing get-user-role / blog-entries / create-blog-entry
# routes, plus settings starting with blog_title="Tornado Blog".  Restore
# the full class from the repository and keep that added route.


def _as_naive_utc(entries):
    """Normalise aware ``published`` datetimes to naive UTC, in place.

    The templates in this app work with naive datetimes; aware values
    coming back from the DB are converted to UTC and stripped of tzinfo.
    ``entries`` may be any iterable of row objects with a ``published``
    attribute.
    """
    for entry in entries:
        published = entry.published
        if published and published.tzinfo is not None:
            entry.published = published.astimezone(timezone.utc).replace(tzinfo=None)


class BaseHandler(tornado.web.RequestHandler):
    # collapsed in diff: the query helpers (query/queryone/execute),
    # current-user machinery, etc.  Only date_serialization was visible.

    def date_serialization(self, obj):
        """``json.dumps`` ``default`` hook: ISO-format date/datetime values.

        Naive datetimes are assumed to be UTC (the DB session is pinned to
        UTC in maybe_create_tables) and are made timezone-aware before
        serialisation.  Fix: plain ``date`` objects have no ``tzinfo``
        attribute, so the tz check is applied to ``datetime`` only.

        Raises:
            TypeError: for any object that is not a date/datetime.
        """
        if isinstance(obj, (date, datetime)):
            if isinstance(obj, datetime) and obj.tzinfo is None:
                obj = obj.replace(tzinfo=timezone.utc)  # assume UTC by default
            return obj.isoformat()
        raise TypeError(
            f"Object of type {obj.__class__.__name__} is not JSON serializable"
        )


class HomeHandler(BaseHandler):
    async def get(self):
        """Render the five most recent entries, or redirect to /compose."""
        entries = await self.query(
            "SELECT * FROM entries ORDER BY published DESC LIMIT 5"
        )
        _as_naive_utc(entries)
        if entries:
            logging.info(
                "Logging type of entries[0].published: %s",
                type(entries[0].published),
            )
        if not entries:
            self.redirect("/compose")
            return
        # collapsed in diff: the render call — restore from the repository
        # (the stock tornado demo renders "home.html"; confirm template name).
        self.render("home.html", entries=entries)


class EntryHandler(BaseHandler):
    async def get(self, slug):
        """Render a single entry looked up by slug; 404 when absent."""
        entry = await self.queryone("SELECT * FROM entries WHERE slug = %s", slug)
        # Fix: check for a missing entry BEFORE touching entry.published.
        if not entry:
            raise tornado.web.HTTPError(404)
        _as_naive_utc([entry])
        logging.info("Logging type of entry.published: %s", type(entry.published))
        self.render("entry.html", entry=entry)


class ArchiveHandler(BaseHandler):
    async def get(self):
        """Render every entry, newest first."""
        entries = await self.query("SELECT * FROM entries ORDER BY published DESC")
        _as_naive_utc(entries)
        if entries:
            logging.info(
                "Logging type of entries[0].published: %s",
                type(entries[0].published),
            )
        self.render("archive.html", entries=entries)


class FeedHandler(BaseHandler):
    async def get(self):
        """Serve the ten most recent entries as an Atom feed."""
        entries = await self.query(
            "SELECT * FROM entries ORDER BY published DESC LIMIT 10"
        )
        _as_naive_utc(entries)
        if entries:
            logging.info(
                "Logging type of entries[0].published: %s",
                type(entries[0].published),
            )
        self.set_header("Content-Type", "application/atom+xml")
        self.render("feed.xml", entries=entries)


# collapsed in diff: GetUserRoleHandler, BlogEntriesHandler and any other
# handlers between FeedHandler and CreateBlogEntryHandler — restore.


class CreateBlogEntryHandler(BaseHandler):
    @tornado.web.authenticated
    async def post(self):
        """Create a blog entry from a JSON body {title, markdown}."""
        try:
            data = json.loads(self.request.body)
            logging.info("Data arriving at CreateBlogEntryHandler %s", data)
            title = data.get("title")
            text = data.get("markdown")
            # collapsed in diff: validation, markdown->HTML conversion and
            # the start of slug generation.  Reconstructed minimally from
            # the visible tail and the sibling update handler — confirm
            # against the repository.
            if not title or not text:
                raise tornado.web.HTTPError(400, "Title and markdown are required")
            html = markdown.markdown(text)
            slug = unicodedata.normalize("NFKD", title).lower().replace(" ", "-")
            slug = slug.encode("ascii", "ignore").decode("ascii")
            if not slug:
                slug = "entry"
            # Ensure slug uniqueness.
            while True:
                existing = await self.query(
                    "SELECT * FROM entries WHERE slug = %s", slug
                )
                # collapsed in diff: the loop exit — presumably break when
                # no clash; confirm against the repository.
                if not existing:
                    break
                slug += "-2"
            # Insert with an explicit UTC timestamp (replacing the old
            # CURRENT_TIMESTAMP) so app and DB agree on the clock source.
            current_time = datetime.now(timezone.utc)
            await self.execute(
                "INSERT INTO entries (author_id, title, slug, markdown, html,"
                " published, updated) VALUES (%s, %s, %s, %s, %s, %s, %s)",
                self.current_user.id,
                title,
                slug,
                text,
                html,
                current_time,
                current_time,
            )
            # Respond with success
            # collapsed in diff: exact success payload — confirm shape.
            self.write({"status": "success", "slug": slug})
        except Exception as e:
            logging.error("Error creating blog post: %s", e)
            self.set_status(500)
            self.write({"status": "error", "message": "Failed to create blog post"})


class UpdateBlogEntriesHandler(BaseHandler):
    """Fetch (GET) or modify (POST) a single blog entry addressed by slug."""

    @tornado.web.authenticated
    async def get(self, slug):
        """Return the entry for ``slug`` as JSON."""
        try:
            # Query one entry, by slug.
            entry = await self.queryone("SELECT * FROM entries WHERE slug = %s", slug)
            logging.info("logging of entry: %s", entry)
            # Serialise via date_serialization so datetime columns survive
            # json.dumps.
            self.write(
                json.dumps(
                    {"status": "success", "entry": entry},
                    default=self.date_serialization,
                )
            )
        except Exception as e:
            logging.error("Error fetching blog entries: %s", e)
            self.set_status(500)
            self.write({"status": "error", "message": "Failed to fetch blog entries"})

    @tornado.web.authenticated  # fix: this method dereferences self.current_user
    async def post(self, slug):
        """Update the entry for ``slug`` from a JSON body {title, markdown}."""
        try:
            data = json.loads(self.request.body)
            title = data.get("title")
            text = data.get("markdown")
            if not title or not text:
                raise tornado.web.HTTPError(400, "Title and markdown are required")
            # Convert markdown to HTML.
            html = markdown.markdown(text)
            current_time = datetime.now(timezone.utc)
            # FIX: the original passed (..., html, slug, current_time),
            # which bound ``slug`` to the ``updated`` column and the
            # timestamp to ``WHERE slug`` — the UPDATE matched no rows.
            # The argument order below matches the placeholders.
            await self.execute(
                "UPDATE entries SET author_id = %s, title = %s, markdown = %s,"
                " html = %s, updated = %s WHERE slug = %s",
                self.current_user.id,
                title,
                text,
                html,
                current_time,
                slug,
            )
            self.write(
                {"status": "success", "message": "Blog post updated", "slug": slug}
            )
        except Exception as e:
            logging.error("Error updating blog post: %s", e)
            self.set_status(500)
            self.write({"status": "error", "message": "Failed to update blog post"})


async def main():
    tornado.options.parse_command_line()
    # collapsed in diff: the rest of main() — restore from the repository.
# NOTE(review): these lines were a second, byte-identical copy of the same
# pasted diff that appears above (a copy/paste artifact).  Re-executing the
# module-level imports, logging.basicConfig and class definitions a second
# time adds nothing and doubles the maintenance surface, so the duplicate
# has been removed.  Nothing unique to this region was lost: every handler
# and import it contained is present in the copy above.