@@ -39,7 +39,7 @@ def get_feed_timestamp(soup_feed, default):
    if value:
        # Fix future dates
        return min(tuple_as_datetime(value), default)
-    logger.debug('no feed timestamp found, using default')
+    logger.debug(u'no feed timestamp found, using default')
    return default

def get_entry_id(entry, default):
@@ -59,7 +59,7 @@ def get_entry_timestamp(entry, default):
    if value:
        # Fix future dates
        return min(tuple_as_datetime(value), default)
-    logger.debug('no entry timestamp found, using default')
+    logger.debug(u'no entry timestamp found, using default')
    return default

def get_entry_title(entry, default):
@@ -74,7 +74,7 @@ def get_entry_content(entry, default):

    candidates = entry.get('content', [])
    if 'summary_detail' in entry:
-        #logger.debug('summary found for entry %s' % entry.link)
+        #logger.debug(u'summary found for entry %s' % entry.link)
        candidates.append(entry.summary_detail)
    for c in candidates:
        # Match text/html, application/xhtml+xml
@@ -84,7 +84,7 @@ def get_entry_content(entry, default):
    if candidates:
        return candidates[0].type, candidates[0].value

-    logger.debug('no content found for entry %s' % entry.link)
+    logger.debug(u'no content found for entry %s' % entry.link)
    return default

# Nullable fields
@@ -127,10 +127,10 @@ def load_plugins():
            fp, pathname, description = imp.find_module(name, [plugin_dir])
            imp.load_module(name, fp, pathname, description)
        except ImportError, ex:
-            logger.warn('could not load %s plugin (%s), ignored' % (name, ex))
+            logger.warn(u'could not load %s plugin (%s), ignored' % (name, ex))
            continue

-        logger.debug('loaded %s plugin' % name)
+        logger.debug(u'loaded %s plugin' % name)
        fp.close()

def add_feed(feed, fetch_icon=False, add_entries=False):
@@ -143,7 +143,7 @@ def add_feed(feed, fetch_icon=False, add_entries=False):

    try:
        previous_feed = Feed.get(Feed.self_link == feed.self_link)
-        logger.debug('feed %s has been already added to database, skipped' % feed.self_link)
+        logger.debug(u'feed %s has been already added to database, skipped' % feed.self_link)
        return previous_feed
    except Feed.DoesNotExist:
        pass
@@ -155,10 +155,10 @@ def add_subscription(feed, user, group):
    try:
        subscription = Subscription.create(user=user, feed=feed, group=group)
    except IntegrityError:
-        logger.debug('user %s has already feed %s in her subscriptions' % (user.username, feed.self_link))
+        logger.debug(u'user %s has already feed %s in her subscriptions' % (user.username, feed.self_link))
        return None

-    logger.debug('added feed %s for user %s' % (feed.self_link, user.username))
+    logger.debug(u'added feed %s for user %s' % (feed.self_link, user.username))
    return subscription

# ------------------------------------------------------
@@ -179,7 +179,7 @@ def fetch_url(url, timeout=None, etag=None, modified_since=None):

    try:
        response = requests.get(url, timeout=timeout, headers=request_headers)
-        logger.debug("got status %d" % response.status_code)
+        logger.debug(u"got status %d" % response.status_code)
    except (IOError, RequestException), ex:
        return None

@@ -215,7 +215,7 @@ def add_synthesized_entry(feed, title, content_type, content):
        last_updated_on = now
    )
    entry.save()
-    logger.debug("synthesized entry %s" % guid)
+    logger.debug(u"synthesized entry %s" % guid)
    return entry

@@ -234,15 +234,15 @@ def post_fetch(status, error=False):
        if error_threshold and (feed.error_count > error_threshold):
            feed.is_enabled = False
            feed.last_status = status # Save status code for posterity
-            logger.warn("%s has too many errors, disabled" % netloc)
+            logger.warn(u"%s has too many errors, disabled" % netloc)
            synthesize_entry('Feed has accomulated too many errors (last was %s).' % status_title(status))
        feed.save()

    max_history = config.getint('fetcher', 'max_history')
    interval = config.getint('fetcher', 'min_interval')
    timeout = config.getint('fetcher', 'timeout')

-    logger.debug("fetching %s" % feed.self_link)
+    logger.debug(u"fetching %s" % feed.self_link)

    schema, netloc, path, params, query, fragment = urlparse.urlparse(feed.self_link)

@@ -256,14 +256,14 @@ def post_fetch(status, error=False):
        # No datetime.timedelta since we need to deal with large seconds values
        delta = datetime_as_epoch(now) - datetime_as_epoch(value)
        if delta < interval:
-            logger.debug("%s for %s is below min_interval, skipped" % (fieldname, netloc))
+            logger.debug(u"%s for %s is below min_interval, skipped" % (fieldname, netloc))
            return

    response = fetch_url(feed.self_link, timeout=timeout, etag=feed.etag, modified_since=feed.last_updated_on)
    if not response:
        # Record as "503 Service unavailable"
        post_fetch(503, error=True)
-        logger.warn("a network error occured while fetching %s" % netloc)
+        logger.warn(u"a network error occured while fetching %s" % netloc)
        return

    feed.last_checked_on = now
@@ -275,33 +275,33 @@ def post_fetch(status, error=False):
            Feed.get(self_link=self_link)
        except Feed.DoesNotExist:
            feed.self_link = self_link
-            logger.info("%s has changed its location, updated to %s" % (netloc, self_link))
+            logger.info(u"%s has changed its location, updated to %s" % (netloc, self_link))
        else:
            feed.is_enabled = False
-            logger.warn("new %s location %s is duplicated, disabled" % (netloc, self_link))
+            logger.warn(u"new %s location %s is duplicated, disabled" % (netloc, self_link))
            synthesize_entry('Feed has a duplicated web address.')
            post_fetch(DuplicatedFeedError.code, error=True)
            return

    if response.status_code == 304: # Not modified
-        logger.debug("%s hasn't been modified, skipped" % netloc)
+        logger.debug(u"%s hasn't been modified, skipped" % netloc)
        post_fetch(response.status_code)
        return
    elif response.status_code == 410: # Gone
        feed.is_enabled = False
-        logger.warn("%s is gone, disabled" % netloc)
+        logger.warn(u"%s is gone, disabled" % netloc)
        synthesize_entry('Feed has been removed from the origin server.')
        post_fetch(response.status_code, error=True)
        return
    elif response.status_code not in POSITIVE_STATUS_CODES: # No good
-        logger.warn("%s replied with status %d, aborted" % (netloc, response.status_code))
+        logger.warn(u"%s replied with status %d, aborted" % (netloc, response.status_code))
        post_fetch(response.status_code, error=True)
        return

    soup = feedparser.parse(response.text)
    # Got parsing error? Log error but do not increment the error counter
    if hasattr(soup, 'bozo') and soup.bozo:
-        logger.info("%s caused a parser error (%s), tried to parse it anyway" % (netloc, soup.bozo_exception))
+        logger.info(u"%s caused a parser error (%s), tried to parse it anyway" % (netloc, soup.bozo_exception))
        post_fetch(response.status_code)

    feed.etag = response.headers.get('ETag', None)
@@ -321,7 +321,7 @@ def post_fetch(status, error=False):
        # point to Feed Burner or similar services
        feed.icon = favicon.fetch(feed.alternate_link or feed.self_link)
        feed.icon_last_updated_on = now
-        logger.debug("saved favicon %s..." % (feed.icon[:70]))
+        logger.debug(u"saved favicon %s..." % (feed.icon[:70]))

    post_fetch(response.status_code)

@@ -334,7 +334,7 @@ def post_fetch(status, error=False):
        guid = get_entry_id(parsed_entry, default=link)

        if not guid:
-            logger.warn('could not find guid for entry from %s, skipped' % netloc)
+            logger.warn(u'could not find guid for entry from %s, skipped' % netloc)
            continue

        author = get_entry_author(parsed_entry, soup.feed)
@@ -345,13 +345,13 @@ def post_fetch(status, error=False):

        # Skip ancient feed items
        if max_history and ((now - timestamp).days > max_history):
-            logger.debug("entry %s from %s is over max_history, skipped" % (guid, netloc))
+            logger.debug(u"entry %s from %s is over max_history, skipped" % (guid, netloc))
            continue

        try:
            # If entry is already in database with same id, then skip it
            Entry.get(guid=guid)
-            logger.debug("duplicated entry %s, skipped" % guid)
+            logger.debug(u"duplicated entry %s, skipped" % guid)
            continue
        except Entry.DoesNotExist:
            pass
@@ -375,7 +375,7 @@ def post_fetch(status, error=False):
def feed_worker(feed):

    if not feed.subscriptions:
-        logger.debug("feed %s has no subscribers, skipped" % feed.self_link)
+        logger.debug(u"feed %s has no subscribers, skipped" % feed.self_link)
        return

    # Allow each process to open and close its database connection
@@ -396,12 +396,12 @@ def fetch_feeds():

    feeds = list(q)
    if not feeds:
-        logger.debug("no feeds found to refresh, halted")
+        logger.debug(u"no feeds found to refresh, halted")
        return

    load_plugins()

-    logger.debug("starting fetcher")
+    logger.debug(u"starting fetcher")
    trigger_event('fetch_started')

    if config.getboolean('fetcher', 'multiprocessing'):
@@ -417,7 +417,7 @@ def fetch_feeds():

    trigger_event('fetch_done', feeds)

-    logger.info("%d feeds checked in %.2fs" % (len(feeds), time.time() - start))
+    logger.info(u"%d feeds checked in %.2fs" % (len(feeds), time.time() - start))
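
A note on the change itself: it is mechanical. Every format string handed to the logger gains a u prefix, so %-interpolation happens in unicode from the start. The sketch below (plain Python 2; title and raw are hypothetical sample values standing in for feedparser output) shows the failure mode this avoids: a byte-string template only works while everything involved is ASCII, while mixing non-ASCII bytes into a unicode operation raises UnicodeDecodeError.

# -*- coding: utf-8 -*-
# Python 2 sketch; `title` and `raw` are hypothetical stand-ins for
# values coming out of feedparser.

title = u'Caf\xe9 du Monde'        # feedparser normally returns unicode

# Byte-string template: works only because 'entry %s' is pure ASCII;
# Python implicitly decodes the template to build a unicode result.
print('entry %s' % title)

# Unicode template: the whole operation stays in unicode and no implicit
# decoding of the template is needed. This is what the u'' prefix buys.
print(u'entry %s' % title)

raw = 'Caf\xc3\xa9'                # non-ASCII *bytes* (UTF-8 encoded)
try:
    u'entry %s' % raw              # implicit raw.decode('ascii') fails
except UnicodeDecodeError as ex:
    print('mixing str and unicode fails: %s' % ex)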