# Copyright (C) 2007, Thomas Leonard
# See the COPYING file for details, or visit http://0install.net.
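#
# 0mirror maintains a static HTTP mirror of Zero Install feeds: it reads
# the feed list from PUBLIC-DIR, re-fetches the stalest feeds, verifies
# their GnuPG signatures, exports the signing keys, and publishes Atom
# news/warnings feeds plus summary statistics.
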
from optparse import OptionParser
import os, sys, time, shutil, subprocess, signal, logging
import codecs	# needed for the UTF-8 news-feed stream below
from ConfigParser import RawConfigParser
from logging import info, debug, warn
from xml.dom import minidom

signal.alarm(10 * 60)	# Abort after 10 minutes

from zeroinstall import SafeException
from zeroinstall.injector.iface_cache import iface_cache
from zeroinstall.injector import model, namespaces, config, gpg, handler
from zeroinstall.support import basedir, tasks

from atom import AtomFeed, set_element
from stats import Stats
from support import format_date, get_feed_dir, ensure_dirs

site_config_file = os.path.abspath('0mirror.ini')
FEED_TIMEOUT = 60	# Seconds to wait before giving up on a feed download

parser = OptionParser(usage="usage: %prog [options] PUBLIC-DIR")
parser.add_option("-v", "--verbose", help="more verbose output", action='count')
parser.add_option("-V", "--version", help="display version information", action='store_true')

(options, args) = parser.parse_args()

39 print "0mirror (zero-install) " + version
40 print "Copyright (C) 2010 Thomas Leonard"
41 print "This program comes with ABSOLUTELY NO WARRANTY,"
42 print "to the extent permitted by law."
43 print "You may redistribute copies of this program"
44 print "under the terms of the GNU General Public License."
45 print "For more information about these matters, see the file named COPYING."
logger = logging.getLogger()
if options.verbose == 1:
	logger.setLevel(logging.INFO)
elif options.verbose:	# -vv and above
	logger.setLevel(logging.DEBUG)

if len(args) != 1:
	parser.print_help()
	sys.exit(1)
public_dir = args[0]

if not os.path.exists(site_config_file):
	print >>sys.stderr, "Configuration file '%s' not found!" % site_config_file
	sys.exit(1)

print "Reading configuration from", site_config_file

site_config = RawConfigParser()
site_config.read(site_config_file)

site_address = site_config.get('site', 'address')	# e.g. "http://localhost/0mirror"
if not site_address.endswith('/'):
	site_address += '/'

# Where we try if the primary site fails
my_mirror = site_config.get('fetching', 'upstream_mirror') or None

n_feeds_to_update = int(site_config.get('fetching', 'n_feeds_to_update'))
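
# For reference, a minimal 0mirror.ini with the keys read above
# (values are illustrative only):
#
#   [site]
#   address = http://localhost/0mirror
#
#   [fetching]
#   upstream_mirror =
#   n_feeds_to_update = 10
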
feed_file = os.path.join(public_dir, 'feed-list')
ignore_file = os.path.join(public_dir, 'ignore-list')
warnings_file = os.path.join(public_dir, 'warnings.xml')

83 <summary type='xhtml'>
84 <div xmlns="http://www.w3.org/1999/xhtml">
85 <a href=""/> - <span/>
91 <summary type='xhtml'>
92 <div xmlns="http://www.w3.org/1999/xhtml">
unconfirmed_keys = []	# List of PendingFeeds
class NonInteractiveHandler(handler.Handler):
	def confirm_import_feed(self, pending, valid_sigs):
		for x in valid_sigs:
			warn("Need to check key %s for %s", x.fingerprint, pending.url)
		unconfirmed_keys.append(pending)
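
# Race a download blocker against a TimeoutBlocker, so that one hung feed
# cannot stall the whole run (signal.alarm above is the last resort).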
@tasks.async
def wait_with_timeout(delay, blocker):
	timeout = tasks.TimeoutBlocker(delay, 'Mirror timeout')
	yield timeout, blocker
	tasks.check([timeout, blocker])
	if not blocker.happened:
		raise Exception("Timeout (waited %d seconds)" % delay)

warnings = []	# [(title, message)] collected for warnings.xml

def add_warning(title, msg):
	warn("%s: %s", title, msg)
	warnings.append((title, msg))

key_dir = os.path.join(public_dir, 'keys')
ensure_dirs(key_dir)
keys = set()	# Fingerprints already exported during this run

def ensure_key(fingerprint):
	if fingerprint in keys:
		return
	key_path = os.path.join(key_dir, fingerprint[-16:] + '.gpg')
	child = subprocess.Popen(['gpg', '-a', '--export', fingerprint], stdout = subprocess.PIPE)
	keydata, unused = child.communicate()
	stream = file(key_path, 'w')
	stream.write(keydata)
	stream.close()
	print "Exported key", fingerprint
	keys.add(fingerprint)
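
# Two Atom feeds are published: news-feed.xml lists recently-updated feeds,
# while warnings.xml reports problems found during this run.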
now = format_date(time.time())

news_feed = AtomFeed(title = "Zero Install News Feed",
		link = site_address + "/news-feed.xml",
		updated = now)

warnings_feed = AtomFeed(title = "0mirror Warnings Feed",
		link = site_address + "/warnings.xml",
		updated = now,
		source = warnings_file)

def load_feed(url):
	return iface_cache.get_feed(url)

def load_feeds(feed_uris):
	logging.getLogger("0install").setLevel(logging.ERROR)	# silence per-feed warnings while loading
	try:
		feeds = {}
		for feed_url in feed_uris:
			feeds[feed_url] = load_feed(feed_url)
	finally:
		logging.getLogger("0install").setLevel(logging.WARNING)
	return feeds

feed = None	# Most recently processed feed, for the error report below
try:
	if not os.path.isdir(public_dir):
		raise SafeException("Public directory '%s' does not exist. "
				"To setup a new site, create it as an empty directory now." % public_dir)
	if not os.path.isfile(feed_file):
		raise SafeException("File '%s' does not exist. It should contain a list of feed URLs, one per line" % feed_file)
	print "Reading", feed_file

	lines = filter(None, file(feed_file).read().split('\n'))
	feed_uris = [line for line in lines if not line.startswith('-')]
	feed_set = set(feed_uris)
	ignore_set = set(filter(None, file(ignore_file).read().split('\n')))
	inactive_set = set(line[1:] for line in lines if line.startswith('-'))

	known_set = feed_set | inactive_set

	stale_feeds = []	# [(last-checked, feed)]
	missing_set = set()	# Dependencies we have already warned about

	c = config.load_config()
	c.handler = NonInteractiveHandler()	# never confirm new keys interactively
	c.mirror = my_mirror

	feeds = load_feeds(feed_uris)

	def last_checked(feed):
		# If we've never downloaded this feed, just keep trying (ignore last_check_attempt)
		if feed is None or feed.last_checked is None:
			return None
		# Use the latest of the last successful check or the last failed check
		last_check_attempt = iface_cache.get_last_check_attempt(feed.url)
		if not last_check_attempt:
			return feed.last_checked
		return max(feed.last_checked or 0, last_check_attempt)

	# List all the feeds, starting with the most stale
	stale_feeds = [(last_checked(feed), url, feed) for url, feed in feeds.items()]
	stale_feeds.sort()	# never-fetched feeds (None) sort first

	# If we've got some completely new feeds, update all of them now
	while n_feeds_to_update < len(stale_feeds) and stale_feeds[n_feeds_to_update - 1][0] in (0, None):
		n_feeds_to_update += 1

	# Update the first few feeds in the list
	stale_feeds = stale_feeds[:n_feeds_to_update]
	for last_check, feed_url, feed in stale_feeds:
		if last_check:
			ctime_str = time.strftime('%Y-%m-%d_%H:%M', time.gmtime(last_check))
			print "Feed %s last checked %s; updating..." % (feed_url, ctime_str)
		else:
			print "Feed %s is new; fetching..." % feed_url

		iface_cache.mark_as_checking(feed_url)
		blocker = c.fetcher.download_and_import_feed(feed_url, iface_cache)
		try:
			tasks.wait_for_blocker(wait_with_timeout(FEED_TIMEOUT, blocker))
		except Exception, ex:
			add_warning("Error fetching feed", "Error fetching '%s': %s" % (feed_url, ex))
			continue

		feed = feeds[feed_url] = load_feed(feed_url)
		#assert feed.last_checked, feed

	for feed_url in feed_uris:
		info("Processing feed '%s'", feed_url)
		feed = feeds[feed_url]
		if feed is None:
			# Error during download?
			add_warning("Fetch failed", "Attempted to fetch '%s', but still not cached" % feed_url)
			continue

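		# Each feed is mirrored as a directory of timestamped snapshots,
		# with a 'latest.xml' symlink pointing at the newest copy.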
		feed_dir = os.path.join(public_dir, get_feed_dir(feed_url))
		ensure_dirs(feed_dir)

		cached = basedir.load_first_cache(namespaces.config_site, 'interfaces', model.escape(feed.url))
		assert cached is not None

		for subfeed in feed.feeds:
			if subfeed.uri not in known_set:
				if subfeed.uri.startswith('/'):
					continue	# local feeds can't be mirrored
				if subfeed.uri not in ignore_set:
					add_warning("Missing subfeed", "WARNING: Subfeed %s of %s not in feeds list" % (subfeed.uri, feed.get_name()))

		# Check for dependencies on feeds we don't mirror
		for impl in feed.implementations.values():
			for dep in impl.requires:
				if dep.interface not in known_set and dep.interface not in missing_set:
					add_warning("Missing dependency", "Version %s of %s depends on %s, but that isn't being mirrored!" % (impl.get_version(), feed.url, dep.interface))
					missing_set.add(dep.interface)
					break
			else:
				continue
			break	# Once we've warned about one version, don't check any other versions

		style = os.path.join(feed_dir, 'interface.xsl')
		if not os.path.islink(style):
			os.symlink('../../../../feed_style.xsl', style)

		latest = os.path.join(feed_dir, 'latest.xml')

		last_modified = int(os.stat(cached).st_mtime)
		version_name = time.strftime('%Y-%m-%d_%H:%M.xml', time.gmtime(last_modified))
		version_path = os.path.join(feed_dir, version_name)

		if os.path.islink(latest) and os.readlink(latest) == version_name:
			if os.path.exists(version_path):
				continue	# already up-to-date
			warn("Broken symlink '%s'!", latest)
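
		# Check the signature before publishing the copy, and export each
		# signing key into the public 'keys' directory.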
		stream = file(cached)
		unused, sigs = gpg.check_stream(stream)
		stream.close()

		for x in sigs:
			if isinstance(x, gpg.ValidSig):
				ensure_key(x.fingerprint)
			else:
				add_warning("Signature problem", x)

		shutil.copyfile(cached, version_path)
		latest_new = latest + '.new'
		if os.path.exists(latest_new):
			os.unlink(latest_new)
		os.symlink(version_name, latest_new)
		os.rename(latest_new, latest)	# atomically replace 'latest.xml'
		print "Updated %s to %s" % (feed, version_name)
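
	# Regenerate the summary statistics, covering active and inactive feeds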
	stats = Stats()
	for feed_url in known_set:
		feed = load_feed(feed_url)
		if feed and feed.last_modified:
			stats.add_feed(feed, feed_url in feed_set)

	stats.write_summary(public_dir)

	for pending_feed in unconfirmed_keys:
		add_warning("Key awaiting confirmation",
			"Feed: {feed}, Fingerprint: {fingerprint}".format(
				feed = pending_feed.url,
				fingerprint = pending_feed.sigs[0].fingerprint))

	i = 0
	for (title, warning) in warnings:
		summary = minidom.parseString(warnings_xml)
		div = summary.getElementsByTagNameNS("http://www.w3.org/1999/xhtml", "div")[0]
		div.appendChild(summary.createTextNode(warning))
		warnings_feed.add_entry(title = title,
				link = site_address + "/warnings.xml",
				entry_id = "warning-" + now + '-%d' % i,
				updated = now,
				summary = summary.documentElement)
		i += 1

	warnings_feed.limit(20)
	with open(warnings_file, 'w') as stream:
		warnings_feed.save(stream)
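
	# Publish the news feed: the 16 most recently modified feeds, newest first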
	latest_feeds = [(feed.last_modified, feed) for feed in feeds.values() if feed]
	latest_feeds.sort()
	latest_feeds = reversed(latest_feeds[-16:])
	for date, feed in latest_feeds:
		summary = minidom.parseString(summary_xml)
		set_element(summary, "summary/div/a", feed.get_name())
		local_html_page = site_address + "/" + get_feed_dir(feed.url).replace('#', '%23') + "/feed.html"
		set_element(summary, "summary/div/a/@href", local_html_page)
		set_element(summary, "summary/div/span", feed.summary)
		news_feed.add_entry(title = "%s feed updated" % feed.get_name(),
				link = local_html_page,
				entry_id = feed.url,	# assumed: feed URL as the unique entry id
				updated = format_date(date),
				summary = summary.documentElement)

	news_stream = codecs.open(os.path.join(public_dir, 'news-feed.xml'), 'w', encoding = 'utf-8')
	news_feed.save(news_stream)
	news_stream.close()

	# Warn about possible missing feeds...
	child = subprocess.Popen(['0launch', '--list'], stdout = subprocess.PIPE)
	all_feeds, unused = child.communicate()
	all_feeds = set([x for x in all_feeds.split('\n') if x and not x.startswith('/')])
	unknown = all_feeds - known_set

	if unknown:
		print "\nUnknown feeds (add to known or ignore lists):"
		for feed in sorted(unknown):
			if '/tests/' in feed: continue
			print feed

	if missing_set:
		print "\nMissing feeds:"
		for x in missing_set:
			print x

except KeyboardInterrupt, ex:
	print >>sys.stderr, "Aborted at user's request"
	sys.exit(1)
except SafeException, ex:
	if options.verbose: raise
	print >>sys.stderr, ex
	if feed is not None:
		print "(while processing %s)" % feed
	sys.exit(1)