# -*- coding: utf-8 -*-

# This is an automatically generated file. You can find more configuration
# parameters in the 'config.py' file.

# The family of sites we are working on. wikipedia.py will import
# families/xxx_family.py, so if you want to change this variable,
# you need to write such a file.
family = 'wikipedia'

# The language code of the site we're working on.
mylang = 'es'

# The dictionary usernames should contain a username for each site where you
# have a bot account.
usernames['wikipedia']['es'] = u'Johnbot'
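# A sketch of how further accounts could be declared, assuming hypothetical
# bot accounts on other sites (example entries, not part of the original file):
# usernames['wikipedia']['en'] = u'Johnbot'
# usernames['wiktionary']['es'] = u'Johnbot'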


############## LOGFILE SETTINGS ##############

# Defines for which scripts a logfile should be enabled. Logfiles will be
# saved in the 'logs' subdirectory.
# Example:
# log = ['interwiki', 'weblinkchecker', 'table2wiki']
# It is also possible to enable logging for all scripts, using this line:
# log = ['*']
# To disable all logging, use this:
# log = []
# By default, logging of interwiki.py is enabled because its logfiles can
# be used to generate so-called warnfiles.
# This setting can be overridden by the -log or -nolog command-line arguments.
log = ['interwiki']


############## INTERWIKI SETTINGS ##############

# Should interwiki.py report warnings for missing links between foreign
# languages?
interwiki_backlink = True

# Should interwiki.py display every new link it discovers?
interwiki_shownew = True

# Should interwiki.py output a graph PNG file on conflicts?
# You need pydot for this: http://dkbza.org/pydot.html
interwiki_graph = False

# The robot processes this many subjects at a time, only starting to load
# new pages in the original language when the total falls below this number.
# The default is to process (at least) 100 subjects at once.
interwiki_min_subjects = 100

# If interwiki graphs are enabled, which format(s) should be used?
# Supported formats include png, jpg, ps, and svg. See:
# http://www.graphviz.org/doc/info/output.html
# If you want to also dump the dot files, you can use this in your
# user-config.py:
# interwiki_graph_formats = ['dot', 'png']
# If you need a PNG image with an HTML image map, use this:
# interwiki_graph_formats = ['png', 'cmap']
# If you only need SVG images, use:
# interwiki_graph_formats = ['svg']
interwiki_graph_formats = ['png']

# You can post the contents of your autonomous_problems.dat to the wiki,
# e.g. to http://de.wikipedia.org/wiki/Wikipedia:Interwiki-Konflikte .
# This allows others to assist you in resolving interwiki problems.
# To help these people, you can upload the interwiki graphs to your
# webspace somewhere. Set the base URL here, e.g.:
# 'http://www.example.org/~yourname/interwiki-graphs/'
interwiki_graph_url = None
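# For example, if the graphs were uploaded to the webspace mentioned above,
# the setting could look like this (placeholder URL, not a real site):
# interwiki_graph_url = 'http://www.example.org/~yourname/interwiki-graphs/'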

# Save a file listing local articles that have no interwiki links.
without_interwiki = False

# Experimental feature:
# Store the page contents on disk (/cache/ directory) instead of loading
# them in RAM.
interwiki_contents_on_disk = False


############## SOLVE_DISAMBIGUATION SETTINGS ############
#
# Set disambiguation_comment[FAMILY][LANG] to a non-empty string to override
# the default edit comment for the solve_disambiguation bot.
# Use %s to represent the name of the disambiguation page being treated.
# Example:
#
# disambiguation_comment['wikipedia']['en'] = \
#     "Robot-assisted disambiguation ([[WP:DPL|you can help!]]): %s"

sort_ignore_case = False


############## IMAGE RELATED SETTINGS ##############
# If you set this to True, images will be uploaded to Wikimedia
# Commons by default.
upload_to_commons = False


############## TABLE CONVERSION BOT SETTINGS ##############

# Split long paragraphs to make the generated wikitext source easier to read.
# Currently only table2wiki.py uses this.
splitLongParagraphs = False
# HTML tables are sometimes indented for readability.
# Keeping that indentation can produce very ugly results.
deIndentTables = True
# table2wiki.py is quite stable, so you might want to switch this to True.
table2wikiAskOnlyWarnings = True
table2wikiSkipWarnings = False


############## WEBLINK CHECKER SETTINGS ##############

# How many external links should weblinkchecker.py check at the same time?
# If you have a fast connection, you might want to increase this number so
# that slow servers won't slow you down.
max_external_links = 50

# Should weblinkchecker.py report dead links on the article's talk page?
report_dead_links_on_talk = False


############## DATABASE SETTINGS ##############
db_hostname = 'localhost'
db_username = 'wikiuser'
db_password = ''

############## SEARCH ENGINE SETTINGS ##############

# Some scripts allow querying Google via the Google Web API. To use this feature,
# you must install the pyGoogle module from http://pygoogle.sf.net/ and have a
# Google Web API license key. Note that Google doesn't give out license keys
# anymore.
# --------------------
# The Google Web API has been obsolete for a long time; the Google AJAX Search
# API can be used instead. You can sign up for an API key at
# http://code.google.com/apis/ajaxsearch/signup.html.
google_key = ''

# The Google AJAX Search API requires a referer website; this variable stores
# the referer web address you provided when signing up for the key.
google_api_refer = ''

# Some scripts allow using the Yahoo! Search Web Services. To use this feature,
# you must install the pYsearch module from http://pysearch.sourceforge.net/
# and get a Yahoo AppID from http://developer.yahoo.com
yahoo_appid = ''

# To use the Windows Live Search web service, you must get an AppID from
# http://search.msn.com/developer
msn_appid = ''

# Settings for the Flickr API
flickr = {
    'api_key': u'',  # Provide your key!
    'review': False,  # Should uploads automatically be marked as reviewed?
    'reviewer': u'',  # If so, under what reviewer name?
}

# Settings for the Panoramio API
panoramio = {
    'review': False,  # Should uploads automatically be marked as reviewed?
    'reviewer': u'',  # If so, under what reviewer name?
}
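# A sketch of what a filled-in Flickr configuration might look like; the key
# and reviewer name below are placeholders, not real credentials:
# flickr = {
#     'api_key': u'0123456789abcdef',
#     'review': True,
#     'reviewer': u'Johnbot',
# }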

# Proxy settings for all connections.
# To use a proxy, proxy['host'] must be an HTTP proxy and include the port
# number (e.g. localhost:8080).
# If the proxy server needs authentication, set proxy['auth'] to ('ID', 'PASSWORD').
proxy = {
    'host': None,
    'auth': None,
}
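# A minimal sketch of a filled-in proxy configuration, assuming a local HTTP
# proxy on port 8080 that requires authentication (all values are placeholders):
# proxy = {
#     'host': 'localhost:8080',
#     'auth': ('ID', 'PASSWORD'),
# }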


############## COPYRIGHT SETTINGS ##############

# Enable/disable each search engine used by the copyright.py script.
copyright_google = True
copyright_yahoo = True
copyright_msn = False

# Perform a deep check, loading the URLs to check whether 'Wikipedia' is present.
# This may be useful to increase the number of correct results. If you don't
# have a fast connection, you might want to keep these disabled.
copyright_check_in_source_google = False
copyright_check_in_source_yahoo = False
copyright_check_in_source_msn = False

# Web pages may contain text from Wikipedia without the word 'Wikipedia' but
# with the typical '[edit]' tag left over from a copy & paste procedure. You
# probably don't want a report for this kind of URL, even if it is a copyright
# violation. However, when this is enabled, these URLs are logged to a file.
copyright_check_in_source_section_names = False

# Limit the number of queries per page.
copyright_max_query_for_page = 25

# Skip a specified number of queries.
copyright_skip_query = 0

# Number of attempts on connection error.
copyright_connection_tries = 10

# Behavior if a query-limit-exceeded error occurs.
#
# Possibilities:
#
# 0 = None
# 1 = Disable search engine
# 2 = Sleep (default)
# 3 = Stop
copyright_exceeded_in_queries = 2
copyright_exceeded_in_queries_sleep_hours = 6

# Append the last-modified date of the URL to the script result.
copyright_show_date = True

# Append the length of the URL to the script result.
copyright_show_length = True

# By default the script tries to identify and skip text that contains a large
# comma-separated list or only numbers, but sometimes that might be the only
# unmodified part of a slightly edited and otherwise unreported copyright
# violation. You can disable this feature to try to increase the number of
# results.
copyright_economize_query = True



############## FURTHER SETTINGS ##############
# The bot can make some additional changes to each page it edits, e.g. fix
# whitespace or the positioning of interwiki and category links.

# This is an experimental feature; handle with care and consider re-checking
# each bot edit if you enable this!
cosmetic_changes = False

# If cosmetic changes are switched on, and you also have several accounts at
# projects where you're not familiar with the local conventions, you probably
# only want the bot to do cosmetic changes on your "home" wiki, which you
# specified in config.mylang and config.family.
# If you want the bot to also do cosmetic changes when editing a page on a
# foreign wiki, set cosmetic_changes_mylang_only to False, but be careful!
cosmetic_changes_mylang_only = True
# The dictionary cosmetic_changes_enable should contain a tuple of language
# codes for each family where you wish to enable cosmetic changes in addition
# to your own language (if cosmetic_changes_mylang_only is set).
# Please set your dictionary by adding such lines to your user-config.py:
# cosmetic_changes_enable['wikipedia'] = ('de', 'en', 'fr')
cosmetic_changes_enable = {}
# The dictionary cosmetic_changes_disable should contain a tuple of language
# codes for each family where you wish to disable cosmetic changes. You may use
# it when cosmetic_changes_mylang_only is False, but you can also disable your
# own language. This also overrides the settings in the cosmetic_changes_enable
# dictionary. Please set your dict by adding such lines to your user-config.py:
# cosmetic_changes_disable['wikipedia'] = ('de', 'en', 'fr')
cosmetic_changes_disable = {}
# Use the experimental disk cache to prevent huge memory usage.
use_diskcache = False

# Retry loading a page on failure (back off 1 minute, 2 minutes, 4 minutes,
# up to 30 minutes).
retry_on_fail = True

# How many pages should be put into the queue in asynchronous mode.
# If maxsize is <= 0, the queue size is infinite.
# Increasing this value uses more memory but can speed up processing;
# the larger the value, the smaller the additional effect.
max_queue_size = 0
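# For example, to bound memory use while still buffering pages ahead, a
# positive value could be set (64 is an arbitrary illustrative value):
# max_queue_size = 64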

# End of configuration section