#!/usr/bin/env python
#
# libgo - script to build library.gnome.org
# Copyright (C) 2007-2009 Frederic Peters
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import os
import sys
import re
import tempfile
import urllib2
from cStringIO import StringIO
from optparse import OptionParser
import logging
try:
    import elementtree.ElementTree as ET
except ImportError:
    import xml.etree.ElementTree as ET
import tarfile
import stat
import subprocess
import dbm
import shutil
import socket
import __builtin__
data_dir = os.path.join(os.path.dirname(__file__), '../data')
__builtin__.__dict__['data_dir'] = data_dir
import errors
import utils
from utils import version_cmp, is_version_number
from document import Document
from overlay import Overlay
from modtypes.gnomedocbook import GnomeDocbookModule
from modtypes.gtkdoc import GtkDocModule
from modtypes.htmlfiles import HtmlFilesModule
from modtypes.mallard import MallardModule
from app import App
# timeout for downloads, so it doesn't hang on connecting to sourceforge
socket.setdefaulttimeout(10)
class FtpDotGnomeDotOrg:
    '''Class providing operations on ftp.gnome.org, be it a local copy
    or the real remote one'''

    def __init__(self, config):
        self.config = config
        if self.config.ftp_gnome_org_local_copy:
            self.ftp_gnome_org_local_copy = self.config.ftp_gnome_org_local_copy
            self.download = self.download_local
            self.listdir = self.listdir_local
        else:
            self.ftp_gnome_org_cache_dir = os.path.join(
                    config.download_cache_dir, 'ftp.gnome.org')
            if not os.path.exists(self.ftp_gnome_org_cache_dir):
                os.makedirs(self.ftp_gnome_org_cache_dir)

    def download(self, filename):
        cache_filename = os.path.join(self.ftp_gnome_org_cache_dir, filename)
        cache_dir = os.path.split(cache_filename)[0]
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        # TODO: support for self.config.use_latest_version
        if os.path.exists(cache_filename) and os.stat(cache_filename)[stat.ST_SIZE]:
            logging.debug('using cache of ftp://ftp.gnome.org/%s' % filename)
            return (cache_filename, open(cache_filename))
        logging.info('downloading ftp://ftp.gnome.org/%s' % filename)
        try:
            open(cache_filename, 'w').write(
                    urllib2.urlopen('ftp://ftp.gnome.org/' + filename).read())
        except IOError:
            # remove the possibly partial download, so it is not reused
            # from the cache on the next run
            if os.path.exists(cache_filename):
                os.remove(cache_filename)
            raise
        return (cache_filename, open(cache_filename))

    def download_local(self, filename):
        local_filename = os.path.join(self.ftp_gnome_org_local_copy, filename)
        if self.config.use_latest_version:
            dirname, basename = os.path.split(local_filename)
            module_name = basename.split('-')[0]
            try:
                latest_version = [x for x in os.listdir(dirname) if x.startswith('LATEST-IS-')]
                if latest_version:
                    latest_base = '%s-%s.' % (module_name, latest_version[0].split('-')[-1])
                    new_basename = [x for x in os.listdir(dirname) if
                            x.startswith(latest_base) and (
                            x.endswith('.tar.gz') or x.endswith('.tar.bz2'))]
                    if new_basename:
                        local_filename = os.path.join(dirname, new_basename[0])
                        logging.debug('using %s instead of %s' % (new_basename[0], basename))
            except OSError:
                pass
        return (local_filename, open(local_filename))

    def listdir(self, dirname):
        entries = []
        for line in urllib2.urlopen('ftp://ftp.gnome.org/' + dirname):
            entries.append(line.strip().split()[-1])
        return entries

    def listdir_local(self, dirname):
        try:
            return sorted(os.listdir(os.path.join(self.ftp_gnome_org_local_copy, dirname)))
        except OSError:
            return []
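
# A minimal usage sketch of the class above (illustrative, not part of the
# module):
#
#   gnome_ftp = FtpDotGnomeDotOrg(config)
#   path, fd = gnome_ftp.download('pub/GNOME/sources/yelp/3.0/yelp-3.0.3.tar.bz2')
#   entries = gnome_ftp.listdir('pub/GNOME/sources/yelp/')
#
# When config.ftp_gnome_org_local_copy is set, __init__ rebinds download()
# and listdir() to their *_local variants, so callers never need to know
# whether a local mirror or the real FTP site is in use.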
class Lgo(App):
    '''Main Application Class'''

    def run(self):
        self.ftp_gnome_org = FtpDotGnomeDotOrg(self.config)
        self.overlay = Overlay(os.path.join(data_dir, 'overlay.xml'))
        self.get_yelp_categories()
        if self.options.rebuild_module:
            self.rebuild_all = True
            for doc_module in self.extract_modules(self.options.rebuild_module):
                doc_module.process()
            sys.exit(0)
        if self.options.rebuild_remote:
            self.generate_static_pages()
            self.apply_overlay()
            sys.exit(0)
        self.copy_static_files()
        self.process_releases()
        if not self.options.skip_extra_tarballs:
            self.process_extra_tarballs()
        if self.config.nightly_tarballs_location:
            self.process_nightly_tarballs()
        self.apply_overlay()
        self.generate_indexes()
        self.generate_html_indexes()
        self.generate_robots()
        self.generate_sitemap()
        self.generate_symbols_files()
        self.generate_static_pages()
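
    # run() above is the whole build in order: fetch the release modulesets
    # and process their tarballs, handle extra and nightly tarballs, apply
    # the overlay, then regenerate the indexes, robots.txt, sitemap, symbols
    # files and static pages.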
    def get_yelp_categories(self):
        logging.info('Getting categories from Yelp')
        scrollkeeper_xml = os.path.join(data_dir, 'externals', 'scrollkeeper.xml')
        toc_xml = os.path.join(data_dir, 'externals', 'toc.xml')
        if not os.path.exists(os.path.join(data_dir, 'externals')):
            os.mkdir(os.path.join(data_dir, 'externals'))
        if not os.path.exists(scrollkeeper_xml) or not os.path.exists(toc_xml):
            filename = FtpDotGnomeDotOrg(self.config).download(
                    'pub/GNOME/sources/yelp/3.0/yelp-3.0.3.tar.bz2')[0]
            tar = tarfile.open(filename, 'r')
            done = 0
            for tarinfo in tar:
                filename = os.path.basename(tarinfo.name)
                if filename == 'scrollkeeper.xml':
                    open(scrollkeeper_xml, 'w').write(tar.extractfile(tarinfo).read())
                    done += 1
                elif filename == 'toc.xml':
                    open(toc_xml, 'w').write(tar.extractfile(tarinfo).read())
                    done += 1
                if done == 2:
                    break
        yelp_toc_tree = ET.fromstring(open(scrollkeeper_xml).read())
        self.toc_mapping = {}
        for subtoc in yelp_toc_tree.findall('toc'):
            sub_id = subtoc.attrib['id']
            for subject in subtoc.findall('subject'):
                self.toc_mapping[subject.attrib['category']] = sub_id
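
    # The resulting toc_mapping goes from a document's ScrollKeeper category
    # to a Yelp TOC id, e.g. (illustrative values, the real ones come from
    # the scrollkeeper.xml shipped in the yelp tarball):
    #   self.toc_mapping['GNOME|Applications|Accessories'] = 'Utilities'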
    def copy_static_files(self):
        App.copy_static_files(self)
        src = os.path.join(data_dir, 'gnome-library-search.xml')
        dst = os.path.join(self.config.output_dir, 'gnome-library-search.xml')
        if not os.path.exists(dst) or \
                os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
            open(dst, 'w').write(open(src, 'r').read())
    def process_releases(self):
        '''Download GNOME releases'''
        releases = self.ftp_gnome_org.listdir('pub/GNOME/teams/releng/')
        releases = [x for x in releases if is_version_number(x)]
        for i, r in enumerate(releases):
            if self.config.version_min and version_cmp(r, self.config.version_min) < 0:
                continue
            if self.config.version_max and version_cmp(r, self.config.version_max) > 0:
                continue
            if i < len(releases)-1 and releases[i+1].startswith(re.match(r'\d+\.\d+\.', r).group()) and \
                    not r == self.config.version_max:
                # next release has the same major.minor version number, so skip
                # this one and get the newer one later
                logging.debug('skipping release %s, not the last in series' % r)
                continue
            if int(r.split('.')[1]) % 2 == 1:
                # odd minor version, a development release; skip it unless it
                # belongs to the current development series
                if not releases[-1].startswith(re.match(r'\d+\.\d+\.', r).group()) and \
                        not r == self.config.version_max:
                    logging.debug('skipping release %s, not the last in series' % r)
                    continue
            if version_cmp(r, '2.19.0') < 0:
                urls = ['pub/GNOME/teams/releng/%(r)s/gnome-%(r)s.modules']
            elif version_cmp(r, '2.91.3') < 0:
                urls = ['pub/GNOME/teams/releng/%(r)s/gnome-suites-%(r)s.modules']
            else:
                urls = ['pub/GNOME/teams/releng/%(r)s/gnome-apps-%(r)s.modules',
                        'pub/GNOME/teams/releng/%(r)s/gnome-suites-core-%(r)s.modules',
                        'pub/GNOME/teams/releng/%(r)s/gnome-suites-core-deps-%(r)s.modules']
            logging.info('Getting GNOME release: %s' % r)
            for url in urls:
                try:
                    moduleset = self.ftp_gnome_org.download(url % {'r': r})[1]
                except Exception, e:
                    logging.error('Failed retrieving %s (%s)' % (url % {'r': r}, str(e)))
                    continue
                self.process_moduleset(moduleset, r)
                if self.config.version_max is None and r == releases[-1]:
                    # this is the last release, and no maximum version was set;
                    # add all modules to extra tarballs, so development versions
                    # not shipped with the latest GNOME are made available.
                    moduleset.seek(0)
                    self.process_latest_moduleset(moduleset)
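
    # Illustration of the skipping logic above: given releases
    # ['2.30.0', '2.30.1', '2.31.1'], 2.30.0 is skipped because 2.30.1
    # shares its major.minor prefix, and the development release 2.31.1 is
    # kept only because it belongs to the current development series.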
    def download(self, url, use_cache=True):
        if url.startswith('gnome://'):
            # special scheme for modules on ftp.gnome.org; the URL looks like
            #   gnome://<modulename>?min=<minimum version>
            # (the parameters are optional)
            # it may be used to define extra tarballs and get new versions
            # picked automatically
            if '?' in url:
                module, params = url[8:].split('?')
                params = [x.split('=') for x in params.split('&')]
                try:
                    min_version = [x[1] for x in params if x[0] == 'min'][0]
                    # version numbers used as ftp.gnome.org directory names
                    # are limited to two components
                    min_version = '.'.join(min_version.split('.')[:2])
                except IndexError:
                    min_version = None
            else:
                module = url[8:]
                min_version = None
            base_module_href = 'pub/GNOME/sources/%s/' % module
            releases = self.ftp_gnome_org.listdir(base_module_href)
            releases = [x for x in releases if is_version_number(x) and
                    not (x.endswith('.png')        # ignore icons
                         or x.endswith('.json'))]  # ignore json
            releases.sort(version_cmp)
            extra_filenames = []
            for i, version_dir in enumerate(releases):
                if min_version and version_cmp(version_dir, min_version) < 0:
                    continue
                try:
                    # check all parts are numbers
                    [int(x) for x in version_dir.split('.')]
                except ValueError:
                    continue
                if int(version_dir.split('.')[1]) % 2 == 1 or int(version_dir.split('.')[0]) == 0:
                    # development release (odd minor version, or major version
                    # of 0): skip unless this is the current development series
                    if version_dir.count('.') == 1:
                        if not version_dir == releases[-1]:
                            continue
                    elif not releases[-1].startswith(re.match(r'\d+\.\d+\.', version_dir).group()):
                        continue
                base_version_href = '%s%s/' % (base_module_href, version_dir)
                filenames = self.ftp_gnome_org.listdir(base_version_href)
                filenames = [x for x in filenames if
                        x.startswith(module + '-%s.' % version_dir) and
                        x.endswith('.tar.bz2')]
                def filenames_cmp(x, y):
                    return version_cmp(x[len(module)+1:-8], y[len(module)+1:-8])
                filenames.sort(filenames_cmp)
                if not filenames:
                    continue
                version_href = '%s%s/%s' % (
                        base_module_href, version_dir, filenames[-1])
                extra_filenames.append(self.ftp_gnome_org.download(version_href)[0])
            if len(extra_filenames) == 1:
                return extra_filenames[0]
            return extra_filenames
        elif url.startswith('http://download.gnome.org/') or \
                url.startswith('http://ftp.gnome.org/'):
            url = url.replace('http://download.gnome.org/', 'pub/GNOME/')
            url = url.replace('http://ftp.gnome.org/', '')
            try:
                filename = self.ftp_gnome_org.download(url)[0]
            except IOError:
                logging.error('error downloading %s' % url)
                return
        else:
            filename = App.download(self, url, use_cache=use_cache)
        return filename
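
    # Example of the gnome:// handling above (hypothetical module name):
    # download('gnome://gtk+?min=2.20') walks pub/GNOME/sources/gtk+/, keeps
    # the latest tarball of each stable series from 2.20 on (plus the
    # current development series), and returns the cached path, or a list of
    # paths when several series match.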
    def process_moduleset(self, moduleset, version_number):
        '''Download tarballs from a module set'''
        doc_modules = []
        tree = ET.parse(moduleset)
        repositories = {}
        for repository in tree.findall('repository'):
            if repository.attrib['type'] != 'tarball':
                continue
            repositories[repository.attrib['name']] = repository.attrib['href']
        self.repositories = repositories
        hrefs = []
        for tarball in tree.findall('tarball'):
            if self.config.modules is not None and tarball.attrib['id'] not in self.config.modules:
                continue
            href = tarball.find('source').attrib['href']
            hrefs.append((tarball, href))
        for module in tree.findall('autotools'):
            if self.config.modules is not None and module.attrib['id'] not in self.config.modules:
                continue
            branch = module.find('branch')
            if branch is None:
                continue
            repository = repositories.get(branch.attrib.get('repo'))
            if repository is None:
                continue
            href = repository + branch.attrib.get('module')
            hrefs.append((module, href))
        for module, href in hrefs:
            if not (href.startswith('http://download.gnome.org/') or
                    href.startswith('http://ftp.gnome.org/')):
                continue
            filename = self.download(href)
            if not filename:
                continue
            logging.info('extracting module %s (from %s moduleset)' % (
                    module.attrib['id'], version_number))
            doc_modules.extend(self.extract_modules(filename))
        gduxrefs = ET.Element('gduxrefs')
        for doc_module in doc_modules:
            if not isinstance(doc_module, GnomeDocbookModule):
                continue
            element = ET.SubElement(gduxrefs, 'doc')
            element.set('id', doc_module.modulename)
            element.set('path', doc_module.path)
        # XXX: this is writing in the build dir, which is not really nice,
        # but it allows for a known location relative to the XSL files.
        tmp_dirname = os.path.join(data_dir, 'tmp')
        if not os.path.exists(tmp_dirname):
            os.makedirs(tmp_dirname)
        tree = ET.ElementTree(gduxrefs)
        tree.write(os.path.join(tmp_dirname, 'gduxrefs.xml'))
        for doc_module in doc_modules:
            logging.info('processing %s (from %s moduleset)' % (
                    doc_module, version_number))
            doc_module.process()
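
    # The gduxrefs.xml written above is picked up by the XSL stylesheets to
    # resolve cross-document links; its shape is roughly (illustrative
    # values):
    #   <gduxrefs>
    #     <doc id="gnome-access-guide" path="/users/gnome-access-guide/"/>
    #   </gduxrefs>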
    def process_latest_moduleset(self, moduleset):
        '''Register all modules from the latest modulesets as extra tarballs
        with minimum version set to version+epsilon'''
        tree = ET.parse(moduleset)
        for tarball in tree.findall('tarball'):
            if self.config.modules is not None and tarball.attrib['id'] not in self.config.modules:
                continue
            url = tarball.find('source').attrib['href']
            if not (url.startswith('http://download.gnome.org/') or
                    url.startswith('http://ftp.gnome.org/')):
                continue
            min_version = tarball.attrib.get('version')
            if not min_version:
                continue
            min_version += '.1'
            logging.info('registering module %s for extra tarballs' % tarball.attrib['id'])
            extra_tarball = 'gnome://%s?min=%s' % (tarball.attrib['id'], min_version)
            self.config.extra_tarballs.append(extra_tarball)
        # XXX: this needs to be refactored, and the moduleset processing code
        # shared with process_moduleset
        for module in tree.findall('autotools'):
            if self.config.modules is not None and module.attrib['id'] not in self.config.modules:
                continue
            branch = module.find('branch')
            if branch is None:
                continue
            repository = self.repositories.get(branch.attrib.get('repo'))
            if repository is None:
                continue
            url = repository + branch.attrib.get('module')
            if not (url.startswith('http://download.gnome.org/') or
                    url.startswith('http://ftp.gnome.org/')):
                continue
            min_version = branch.attrib.get('version')
            if not min_version:
                continue
            min_version += '.1'
            logging.info('registering module %s for extra tarballs' % module.attrib['id'])
            extra_tarball = 'gnome://%s?min=%s' % (module.attrib['id'], min_version)
            self.config.extra_tarballs.append(extra_tarball)
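
    # The appended '.1' is the "epsilon" from the docstring: it nudges the
    # minimum version just past the version shipped in the moduleset, so the
    # gnome:// handling in download() targets newer releases of the module.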
    def extract_modules(self, filename, nightly=False):
        logging.debug('looking for doc modules in %s' % filename)
        doc_modules = []
        mtime = os.stat(filename)[stat.ST_MTIME]
        if self.config.fast_mode:
            ext_dirname = os.path.join(self.config.private_dir, 'extracts',
                    os.path.splitext(os.path.splitext(os.path.basename(filename))[0])[0])
            stamp_file = ext_dirname + '.extract-stamp'
        else:
            stamp_file = None
        base_tarball_name = os.path.basename(filename).rsplit('-', 1)[0]
        if nightly:
            stamp_file = None
        if stamp_file and os.path.exists(stamp_file) and not os.path.exists(ext_dirname):
            # file was extracted once, and no doc modules were found inside
            return []
        elif stamp_file and os.path.exists(stamp_file) and os.stat(stamp_file)[stat.ST_MTIME] < mtime:
            # file was extracted but has been modified since then, remove the
            # extraction if it exists:
            if os.path.exists(ext_dirname):
                logging.debug('removing old copy of files from %s' % base_tarball_name)
                shutil.rmtree(ext_dirname, ignore_errors=True)
            tar = tarfile.open(filename, 'r')
        elif stamp_file and os.path.exists(stamp_file):
            tar = utils.FakeTarFile(ext_dirname)
        else:
            tar = tarfile.open(filename, 'r')
        doc_version = os.path.splitext(tar.name)[0].split('-')[-1]
        if doc_version.endswith('.tar'):
            doc_version = doc_version[:-4]
        more_tarball_docs = self.overlay.more_tarball_docs.get(
                base_tarball_name, [])[:]
        # iterate over a copy, since items may be removed from the list
        for more_doc in more_tarball_docs[:]:
            # don't look for docs that require a newer version of the module
            if 'minimum-version' in more_doc.attrib:
                if version_cmp(doc_version, more_doc.attrib.get('minimum-version')) < 0:
                    more_tarball_docs.remove(more_doc)
        extraction_happened = False
        regex_gdu = re.compile(r'include.*gnome-doc-utils.make', re.DOTALL)
        for tarinfo in tar:
            doc = None
            if os.path.split(tarinfo.name)[-1] in ('Makefile.am', 'GNUmakefile.am'):
                fd = tar.extractfile(tarinfo)
                makefile_am = fd.read()
                # merge lines continued with \
                makefile_am = re.sub(r'\\\s*\n', r' ', makefile_am)
                if 'HELP_ID' in makefile_am and '@YELP_HELP_RULES@' in makefile_am:
                    logging.debug('found usage of mallard (via YELP_HELP_RULES) in %s' % tarinfo.name)
                    doc = MallardModule.create_from_tar(tar, tarinfo, makefile_am, nightly)
                elif 'DOC_ID' in makefile_am and regex_gdu.findall(makefile_am):
                    logging.debug('found usage of mallard in %s' % tarinfo.name)
                    doc = MallardModule.create_from_tar(tar, tarinfo, makefile_am, nightly)
                elif 'DOC_MODULE' in makefile_am and regex_gdu.findall(makefile_am):
                    logging.debug('found usage of docbook in %s' % tarinfo.name)
                    doc = GnomeDocbookModule.create_from_tar(tar, tarinfo, makefile_am, nightly)
                elif 'include $(top_srcdir)/gtk-doc.make' in makefile_am or \
                        'include $(srcdir)/gtk-doc.make' in makefile_am or \
                        'include gtk-doc.make' in makefile_am or \
                        ('gtkdoc-scan' in makefile_am and 'gtk-doc' not in tarinfo.name):
                    logging.debug('found usage of gtk-doc in %s' % tarinfo.name)
                    doc = GtkDocModule.create_from_tar(tar, tarinfo, makefile_am, nightly)
                elif 'SUBDIRS = C' in makefile_am and \
                        os.path.basename(filename).startswith('gtk-doc-'):
                    logging.debug('found gtk-doc almost gnome-doc-utils manual in %s' % tarinfo.name)
                    makefile_am += '\nDOC_MODULE = gtk-doc-manual\n'
                    doc = GnomeDocbookModule.create_from_tar(tar, tarinfo, makefile_am, nightly)
                else:
                    continue
                if '$(' in doc.modulename:
                    continue
                if not doc.modulename or doc.modulename in self.config.blacklist:
                    continue
            else:
                for more_doc in more_tarball_docs[:]:
                    if not tarinfo.isdir():
                        continue
                    directory_name = tarinfo.name
                    if directory_name[-1] != '/':
                        directory_name += '/'
                    if directory_name.endswith(more_doc.attrib.get('dir')):
                        doc = HtmlFilesModule.create_from_tar(tar, tarinfo, more_doc)
                        more_tarball_docs.remove(more_doc)
                        continue
            if doc:
                doc.filename = filename
                doc.mtime_tarball = mtime
                if extraction_happened:
                    doc.extract(force=True)
                else:
                    extraction_happened = doc.extract()
                doc.setup_channel()
                doc.path = self.get_module_web_path(doc)
                if self.config.channels is None or doc.channel in self.config.channels:
                    doc_modules.append(doc)
                else:
                    logging.debug('ignoring %s, not in an appropriate channel' % doc.modulename)
        if more_tarball_docs:
            for more_doc in more_tarball_docs:
                logging.error('[%s] overlay file mentioned %s but it was not found' % (
                        base_tarball_name, more_doc.attrib.get('dir')))
        tar.close()
        if stamp_file:
            # touch the extract stamp file
            if not os.path.exists(ext_dirname):
                os.makedirs(ext_dirname)
            open(stamp_file, 'w').close()
        return doc_modules
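
    # Summary of the Makefile.am heuristics used above:
    #   HELP_ID + @YELP_HELP_RULES@               -> Mallard (yelp.m4)
    #   DOC_ID + gnome-doc-utils.make include     -> Mallard (gnome-doc-utils)
    #   DOC_MODULE + gnome-doc-utils.make include -> DocBook
    #   gtk-doc.make include, or gtkdoc-scan      -> gtk-doc reference
    #   'SUBDIRS = C' in the gtk-doc tarball      -> gtk-doc's own manual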
    def process_extra_tarballs(self):
        if self.config.extra_tarballs:
            logging.info('processing extra tarballs')
            for url in self.config.extra_tarballs:
                logging.debug('processing extra tarball: %s' % url)
                filename = self.download(url)
                if not filename:
                    continue
                if not isinstance(filename, list):
                    filename = [filename]
                for fname in filename:
                    for doc_module in self.extract_modules(fname):
                        doc_module.process()
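
    # Extra tarballs come from the configuration as plain URLs or gnome://
    # specs, e.g. (illustrative): extra_tarballs = ['gnome://vala?min=0.10'].
    # A gnome:// spec may resolve to several tarballs, hence the list
    # handling above.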
    def process_nightly_tarballs(self):
        logging.info('processing nightly tarballs')
        for filename in os.listdir(self.config.nightly_tarballs_location):
            if not (filename.endswith('.tar.gz') or filename.endswith('.tar.bz2')):
                continue
            filename = os.path.join(self.config.nightly_tarballs_location, filename)
            for doc_module in self.extract_modules(filename, nightly=True):
                doc_module.process()
    def generate_indexes(self):
        logging.info('generating indexes')
        indexes = ET.Element('indexes')
        # get all possible languages
        languages = {}
        for doc in self.documents:
            for lang in doc.languages:
                if lang == 'C':
                    continue  # ignore
                if self.config.languages and lang not in self.config.languages:
                    continue
                languages[lang] = True
        if 'sr@Latn' in languages and 'sr@latin' in languages:
            del languages['sr@Latn']
        for lang in languages.keys():
            home = ET.SubElement(indexes, 'home')
            home.set('lang', lang)
        channels = self.config.channels
        if not channels:
            channels = ('users', 'devel', 'admin', 'misc')
        for channel in channels:
            docs = [x for x in self.documents if x.channel == channel]
            if not docs:
                continue
            for lang in languages.keys():
                logging.debug('generating index for lang %s' % lang)
                sections = {}
                for x in docs:
                    if x.toc_id is None:
                        logging.warning('doc %s has no toc id -> default to Other' % x.module)
                        x.toc_id = 'Other'
                    sections[x.toc_id] = True
                sections = sections.keys()
                sections.sort()
                docs.sort(lambda x, y: cmp(x.title.get(lang), y.title.get(lang)))
                subindexes = self.overlay.get_subindexes(channel)
                if not subindexes:
                    index = ET.SubElement(indexes, 'index')
                    index.set('lang', lang)
                    index.set('channel', channel)
                    for section in sections:
                        section_docs = [x for x in docs if x.toc_id == section]
                        if not section_docs:
                            continue
                        self.create_section(index, section, section_docs, lang)
                else:
                    remaining_sections = sections[:]
                    subindex_index = ET.SubElement(indexes, 'index')
                    subindex_index.set('lang', lang)
                    subindex_index.set('channel', channel)
                    for subindex in subindexes:
                        local_sections = [x for x in sections if x in subindex.sections]
                        if not local_sections:
                            continue
                        index = subindex.create_element(subindex_index,
                                channel, lang)
                        for section in local_sections:
                            remaining_sections.remove(section)
                            section_docs = [x for x in docs if x.toc_id == section]
                            if not section_docs:
                                continue
                            self.create_section(index, section, section_docs, lang)
                    if remaining_sections:
                        logging.warn('%s channel is missing some sections: %s' % (
                                channel, ', '.join(remaining_sections)))
        self.indexes_tmp_file = tempfile.NamedTemporaryFile()
        tree = ET.ElementTree(indexes)
        tree.write(self.indexes_tmp_file.name)
        if self.debug:
            tree.write('/tmp/library-web-indexes.xml.%s' % os.getpid())
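
    # The temporary file drives every index-like output (HTML indexes,
    # robots.txt, sitemap); its shape is roughly (illustrative):
    #   <indexes>
    #     <home lang="fr"/>
    #     <index lang="fr" channel="users">
    #       <section toc_id="..." weight="...">...</section>
    #     </index>
    #   </indexes>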
    def create_section(self, index, section, section_docs, lang):
        section_node = ET.SubElement(index, 'section')
        section_node.set('toc_id', section)
        section_node.set('weight', str(
                self.overlay.get_section_weight(section)))
        subsections = {}
        for x in section_docs:
            subsections[x.subsection] = True
        subsections = subsections.keys()
        for subsection in subsections:
            subsection_docs = [x for x in section_docs if x.subsection == subsection]
            if subsection is None:
                parent_elem = section_node
            else:
                parent_elem = ET.SubElement(section_node, 'section')
                parent_elem.set('title', subsection)
                parent_elem.set('weight', str(
                        self.overlay.get_section_weight(subsection)))
            for doc in subsection_docs:
                logging.debug('generating index for module %s' % doc.module)
                if lang in doc.languages:
                    # document is available in the requested
                    # language, perfect.
                    doc_lang = lang
                elif lang[:2] in doc.languages:
                    # mapping to a "general" language, for example
                    # from en_GB to en, from fr_BE to fr...
                    doc_lang = lang[:2]
                elif [x for x in doc.languages if x[:2] == lang]:
                    # mapping to a "country" language, for
                    # example from pt to pt_BR
                    doc_lang = [x for x in doc.languages if x[:2] == lang][0]
                else:
                    # fall back to English
                    doc_lang = 'en'
                doc.create_element(parent_elem, doc_lang,
                        original_language=lang)
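
    # Language fallback in the loop above, spelled out: for 'fr_BE' a
    # document available in ('en', 'fr') gets doc_lang 'fr'; for 'pt' a
    # document only available as 'pt_BR' gets 'pt_BR'; with no match at all
    # it falls back to 'en'.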
    def generate_from_indexes(self, xsl_filename):
        idx_filename = self.indexes_tmp_file.name
        cmd = ['xsltproc', '--output', self.config.output_dir,
               '--nonet', '--xinclude', xsl_filename, idx_filename]
        if self.debug:
            cmd.insert(-2, '--param')
            cmd.insert(-2, 'libgo.debug')
            cmd.insert(-2, 'true()')
        if self.config.symbols_dbm_filepath:
            cmd.insert(-2, '--param')
            cmd.insert(-2, 'libgo.dbm_support')
            cmd.insert(-2, 'true()')
        logging.debug('executing %s' % ' '.join(cmd))
        rc = subprocess.call(cmd)
        if rc != 0:
            logging.warn('%s failed with error %d' % (' '.join(cmd), rc))
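
    # Resulting invocation, roughly (paths are illustrative):
    #   xsltproc --output <output_dir> --nonet --xinclude \
    #       [--param libgo.debug true()] [--param libgo.dbm_support true()] \
    #       <xsl_filename> <indexes.xml>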
    def generate_html_indexes(self):
        logging.info('generating index.html files')
        indexes_xsl_file = self.config.indexes_xsl_file or self.default_indexes_xsl_file
        self.generate_from_indexes(indexes_xsl_file)

    def generate_robots(self):
        logging.info('generating robots.txt file')
        robots_xsl_file = os.path.join(data_dir, 'xslt', 'robots.xsl')
        self.generate_from_indexes(robots_xsl_file)

    def generate_sitemap(self):
        logging.info('generating sitemap file')
        sitemap_xsl_file = os.path.join(data_dir, 'xslt', 'sitemap.xsl')
        self.generate_from_indexes(sitemap_xsl_file)
    def generate_static_pages(self):
        try:
            doc_linguas = re.findall(r'DOC_LINGUAS\s+=[\t ](.*)',
                    open(os.path.join(data_dir, 'pages', 'Makefile.am')).read())[0].split()
            doc_linguas.append('C')
        except IndexError:
            doc_linguas = ['C']
        if self.config.languages:
            for lang in doc_linguas[:]:
                if lang not in self.config.languages + ['C']:
                    doc_linguas.remove(lang)
        web_output_dir = os.path.join(self.config.output_dir, 'about')
        for lang in doc_linguas:
            xml_file = os.path.join(data_dir, 'pages', lang, 'libgo.xml')
            if lang == 'C':
                lang = 'en'
            cmd = ['xsltproc', '--output', web_output_dir + '/',
                   '--nonet', '--xinclude',
                   '--stringparam', 'libgo.lang', lang,
                   '--stringparam', 'libgo.channel', 'about',
                   '--param', 'db2html.navbar.bottom', 'false()',
                   GnomeDocbookModule.db2html_xsl_file, xml_file]
            logging.debug('executing %s' % ' '.join(cmd))
            rc = subprocess.call(cmd)
            if rc != 0:
                logging.warn('%s failed with error %d' % (' '.join(cmd), rc))
    def get_module_web_path(self, module, versioned=True):
        base_path = self.config.doc_path_template % {
                'channel': module.channel,
                'module': module.modulename}
        licence_modules = ['fdl', 'gpl', 'lgpl']
        if module.modulename in licence_modules or versioned is False:
            # special casing the licences, they do not go in a
            # versioned path
            return base_path
        else:
            return base_path + module.one_dot_version + '/'
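
    # For instance, with a doc_path_template of '/%(channel)s/%(module)s/'
    # (illustrative, the real template comes from the configuration), a
    # 'devel' module 'glib' with one_dot_version '2.24' maps to
    # '/devel/glib/2.24/', while licence modules such as 'gpl' keep the
    # unversioned base path.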
    def get_module_web_output_dir(self, module, versioned=True):
        return os.path.join(self.config.output_dir,
                self.get_module_web_path(module, versioned=versioned)[1:])
if __name__ == '__main__':
    app = Lgo()
    app.Document = Document
    app.run()