diff --git a/integrations/rss/rss-bot b/integrations/rss/rss-bot
index 5048bdbb..4926b993 100644
--- a/integrations/rss/rss-bot
+++ b/integrations/rss/rss-bot
@@ -39,9 +39,9 @@ from typing import Dict, List, Tuple, Any
 
 import feedparser
 import zulip
-VERSION = "0.9" # type: str
-RSS_DATA_DIR = os.path.expanduser(os.path.join('~', '.cache', 'zulip-rss')) # type: str
-OLDNESS_THRESHOLD = 30 # type: int
+VERSION = "0.9"  # type: str
+RSS_DATA_DIR = os.path.expanduser(os.path.join('~', '.cache', 'zulip-rss'))  # type: str
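+# Maximum age, in days, of feed entries the bot will process (see the staleness check below).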
+OLDNESS_THRESHOLD = 30  # type: int
 
 usage = """Usage: Send summaries of RSS entries for your favorite feeds to Zulip.
 
@@ -67,7 +67,7 @@ stream every 5 minutes is:
 
 */5 * * * * /usr/local/share/zulip/integrations/rss/rss-bot"""
 
-parser = optparse.OptionParser(usage) # type: optparse.OptionParser
+parser = optparse.OptionParser(usage)  # type: optparse.OptionParser
 parser.add_option('--stream',
                   dest='stream',
                   help='The stream to which to send RSS messages.',
@@ -94,7 +94,7 @@ parser.add_option('--math',
                   help='Convert $ to $$ (for KaTeX processing)',
                   default=False)
 parser.add_option_group(zulip.generate_option_group(parser))
-(opts, args) = parser.parse_args() # type: Tuple[Any, List[str]]
+(opts, args) = parser.parse_args()  # type: Tuple[Any, List[str]]
 
 def mkdir_p(path):
     # type: (str) -> None
@@ -114,15 +114,15 @@ except OSError:
     print("Unable to store RSS data at %s." % (opts.data_dir,), file=sys.stderr)
     exit(1)
 
-log_file = os.path.join(opts.data_dir, "rss-bot.log") # type: str
-log_format = "%(asctime)s: %(message)s" # type: str
+log_file = os.path.join(opts.data_dir, "rss-bot.log")  # type: str
+log_format = "%(asctime)s: %(message)s"  # type: str
 logging.basicConfig(format=log_format)
 
-formatter = logging.Formatter(log_format) # type: logging.Formatter
-file_handler = logging.FileHandler(log_file) # type: logging.FileHandler
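+# Log to a file in the data directory as well as to stderr (via basicConfig above).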
+formatter = logging.Formatter(log_format)  # type: logging.Formatter
+file_handler = logging.FileHandler(log_file)  # type: logging.FileHandler
 file_handler.setFormatter(formatter)
 
-logger = logging.getLogger(__name__) # type: logging.Logger
+logger = logging.getLogger(__name__)  # type: logging.Logger
 logger.setLevel(logging.DEBUG)
 logger.addHandler(file_handler)
 
@@ -136,7 +136,7 @@ class MLStripper(HTMLParser):
     def __init__(self):
         # type: () -> None
         self.reset()
-        self.fed = [] # type: List[str]
+        self.fed = []  # type: List[str]
 
     def handle_data(self, data):
         # type: (str) -> None
@@ -180,7 +180,7 @@ def send_zulip(entry, feed_name):
     content = "**[%s](%s)**\n%s\n%s" % (entry.title,
                                         entry.link,
                                         strip_tags(body),
-                                        entry.link) # type: str
+                                        entry.link)  # type: str
 
     if opts.math:
         content = content.replace('$', '$$')
@@ -190,36 +190,36 @@ def send_zulip(entry, feed_name):
                "to": opts.stream,
                "subject": elide_subject(feed_name),
                "content": content,
-               } # type: Dict[str, str]
+               }  # type: Dict[str, str]
     return client.send_message(message)
 
 try:
     with open(opts.feed_file, "r") as f:
-        feed_urls = [feed.strip() for feed in f.readlines()] # type: List[str]
+        feed_urls = [feed.strip() for feed in f.readlines()]  # type: List[str]
 except IOError:
     log_error_and_exit("Unable to read feed file at %s." % (opts.feed_file,))
 
 client = zulip.Client(email=opts.zulip_email, api_key=opts.zulip_api_key,
-                      site=opts.zulip_site, client="ZulipRSS/" + VERSION) # type: zulip.Client
+                      site=opts.zulip_site, client="ZulipRSS/" + VERSION)  # type: zulip.Client
 
-first_message = True # type: bool
+first_message = True  # type: bool
 
 for feed_url in feed_urls:
-    feed_file = os.path.join(opts.data_dir, urllib.parse.urlparse(feed_url).netloc) # Type: str
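+    # Per-feed state file (named after the feed URL's netloc) listing hashes of entries already sent.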
+    feed_file = os.path.join(opts.data_dir, urllib.parse.urlparse(feed_url).netloc)  # type: str
 
     try:
         with open(feed_file, "r") as f:
-            old_feed_hashes = dict((line.strip(), True) for line in f.readlines()) # type: Dict[str, bool]
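+            # Each line in the state file is the hash of an entry that was already sent.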
+            old_feed_hashes = dict((line.strip(), True) for line in f.readlines())  # type: Dict[str, bool]
     except IOError:
         old_feed_hashes = {}
 
-    new_hashes = [] # type: List[str]
-    data = feedparser.parse(feed_url) # type: feedparser.parse
+    new_hashes = []  # type: List[str]
+    data = feedparser.parse(feed_url)  # type: feedparser.FeedParserDict
 
     for entry in data.entries:
-        entry_hash = compute_entry_hash(entry) # type: str
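+        # Hash the entry so items already recorded in the state file can be skipped.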
+        entry_hash = compute_entry_hash(entry)  # type: str
         # An entry has either been published or updated.
-        entry_time = entry.get("published_parsed", entry.get("updated_parsed")) # type: Tuple[int, int]
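+        # feedparser exposes both timestamps as time.struct_time values.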
+        entry_time = entry.get("published_parsed", entry.get("updated_parsed"))  # type: time.struct_time
         if entry_time is not None and (time.time() - calendar.timegm(entry_time)) > OLDNESS_THRESHOLD * 60 * 60 * 24:
             # As a safeguard against misbehaving feeds, don't try to process
             # entries older than some threshold.
@@ -232,9 +232,9 @@ for feed_url in feed_urls:
             # entries in reverse chronological order.
             break
 
-        feed_name = data.feed.title or feed_url # type: str
+        feed_name = data.feed.title or feed_url  # type: str
 
-        response = send_zulip(entry, feed_name) # type: Dict[str, Any]
+        response = send_zulip(entry, feed_name)  # type: Dict[str, Any]
         if response["result"] != "success":
             logger.error("Error processing %s" % (feed_url,))
             logger.error(str(response))