Robots and Sitemaps

Dealing with site indexing by Google and other search engines can be a pain, but fortunately Django makes it easy with django.contrib.sitemaps and the django-robots app. In this video, learn how to create sitemap.xml and robots.txt quickly and easily.
todo/settings.py
INSTALLED_APPS = (
    ...
    'django.contrib.sites',
    'django.contrib.sitemaps',
    'robots',
)
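Both the sitemap framework and django-robots lean on the sites framework, so settings.py also needs a SITE_ID pointing at the current site (1 is the pk of the default site Django creates on syncdb):

todo/settings.py
SITE_ID = 1  # pk of the Site object for this project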
todo/urls.py
from django.conf.urls import patterns, include, url

from main.sitemaps import TodoSitemap

sitemaps = {
    'todos': TodoSitemap()
}

urlpatterns = patterns('',
    url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
    url(r'^robots\.txt$', include('robots.urls')),
)
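With that wired up, robots.txt is rendered from the Rule objects you manage in the Django admin, and /sitemap.xml returns standard sitemap-protocol XML built from the current Site's domain plus the objects returned by the sitemap class. The response looks roughly like this (the domain and item URL are just placeholders):

<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>http://example.com/item/1/</loc>
    <changefreq>weekly</changefreq>
    <priority>0.5</priority>
  </url>
</urlset>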
main/sitemaps.py
from django.contrib.sitemaps import Sitemap

from main.models import Item

class TodoSitemap(Sitemap):
    changefreq = "weekly"
    priority = 0.5

    def items(self):
        return Item.objects.all()
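Since TodoSitemap doesn't define location(), Django falls back to calling get_absolute_url() on each object returned by items(), so the Item model has to provide one. A minimal sketch, where the field and the item_detail URL name are hypothetical and only get_absolute_url() matters:

main/models.py
from django.core.urlresolvers import reverse
from django.db import models


class Item(models.Model):
    title = models.CharField(max_length=200)

    def get_absolute_url(self):
        # Hypothetical URL name; point this at whatever view serves a single Item.
        return reverse('item_detail', args=[self.pk])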