# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

# This is an automatically generated file. You can find more configuration
# parameters in the 'config.py' file.

# The family of sites to work on by default.
#
# ‘site.py’ imports ‘families/xxx_family.py’, so if you want to change
# this variable, you need to use the name of one of the existing family files
# in that folder or write your own, custom family file.
#
# For ‘site.py’ to be able to read your custom family file, you must
# save it to ‘families/xxx_family.py’, where ‘xxx’ is the codename of the
# family that your custom ‘xxx_family.py’ family file defines.
#
# You can also save your custom family files to a different folder. As long
# as you follow the ‘xxx_family.py’ naming convention, you can register your
# custom folder in this configuration file with the following global function:
#
#   register_families_folder(folder_path)
#
# Alternatively, you can register particular family files that do not need
# to follow the ‘xxx_family.py’ naming convention using the following
# global function:
#
#   register_family_file(family_name, file_path)
#
# Where ‘family_name’ is the family code (the ‘xxx’ in standard family file
# names) and ‘file_path’ is the absolute path to the target family file.
#
# If you use either of these functions to define the family to work on by
# default (the ‘family’ variable below), you must place the function call
# before the definition of the ‘family’ variable.
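#
# A minimal sketch of both registration styles; the folder and file paths
# below are hypothetical and only illustrate the calls described above:
#
# register_families_folder('/home/me/families')
# register_family_file('famwiki', '/home/me/families/famwiki_family.py')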
family = 'famwiki'

# The language code of the site we're working on.
mylang = 'en'

# The dictionary usernames should contain a username for each site where you
# have a bot account. If you have a unique username for all languages of a
# family, you can use '*'.
usernames['famwiki']['en'] = u'Petike'
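# If the same bot account exists on every language of the family, a single
# wildcard entry (a sketch of the '*' convention mentioned above) would do:
# usernames['famwiki']['*'] = u'Petike'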

# The list of BotPasswords is saved in another file. Import it if needed.
# See https://www.mediawiki.org/wiki/Manual:Pywikibot/BotPasswords to learn
# how to use them.
password_file = "user-password.py"
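# As a sketch, assuming the BotPassword format documented on the manual page
# above, 'user-password.py' could contain a line like the following (the
# bot-name suffix and password are placeholders):
# (u'Petike', BotPassword(u'mybot', u'example-password'))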

# ############# LOGFILE SETTINGS ##############

# Defines for which scripts a logfile should be enabled. Logfiles will be
# saved in the 'logs' subdirectory.
# Example:
#     log = ['interwiki', 'weblinkchecker', 'table2wiki']
# It is also possible to enable logging for all scripts, using this line:
#     log = ['*']
# To disable all logging, use this:
#     log = []
# By default, logging of interwiki.py is enabled because its logfiles can
# be used to generate so-called warnfiles.
# This setting can be overridden by the -log or -nolog command-line arguments.
log = ['interwiki']
# The filename defaults to modulename-bot.log.
logfilename = None
# Maximal size of a logfile in kilobytes. If the size reaches that limit, the
# logfile will be renamed (if logfilecount is not 0) and a new file will be
# started. logfilesize must be an integer value.
logfilesize = 1024
# Number of rotating logfiles to keep. The older files get the higher
# numbers. If logfilecount is 0, no logfile will be archived, but the current
# logfile will be overwritten once it reaches the logfilesize above.
# If logfilecount is -1, there is no limit: files are renamed whenever the
# logfile is full, and the newest file gets the highest number until some
# logfiles are deleted.
logfilecount = 5
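# Taken together, the two settings above mean: with logfilesize = 1024 and
# logfilecount = 5, the current logfile is rotated once it passes roughly
# 1 MB, and at most five archived copies are kept.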
# Set to 1 (or higher) to generate "informative" messages to terminal.
verbose_output = 0
# Set to True to fetch the pywiki version online.
log_pywiki_repo_version = False
# If True, include a lot of debugging info in the logfile
# (overrides the log setting above).
debug_log = []

# ############# EXTERNAL SCRIPT PATH SETTING ##############
# Set your own script path to look up your script files.
# Your private script path must be located inside the
# framework folder; subfolders must be delimited by '.'.
# Every folder must contain an (empty) __init__.py file.
#
# The search order is
# 1. user_script_paths in the given order
# 2. scripts
# 3. scripts/maintenance
# 4. scripts/archive
#
# Sample:
# user_script_paths = ['scripts.myscripts']
user_script_paths = []

# ############# INTERWIKI SETTINGS ##############

# Should interwiki.py report warnings for missing links between foreign
# languages?
interwiki_backlink = True

# Should interwiki.py display every new link it discovers?
interwiki_shownew = True

# Should interwiki.py output a graph PNG file on conflicts?
# You need pydot for this:
# https://pypi.python.org/pypi/pydot/1.0.2
# https://code.google.com/p/pydot/
interwiki_graph = False

# Specifies how many subjects the robot should process at a time, only
# starting to load new pages in the original language when the total falls
# below that number. The default is to process (at least) 100 subjects at
# once.
interwiki_min_subjects = 100

# If interwiki graphs are enabled, which format(s) should be used?
# Supported formats include png, jpg, ps, and svg. See:
# http://www.graphviz.org/doc/info/output.html
# If you want to also dump the dot files, you can use this in your
# user-config.py:
# interwiki_graph_formats = ['dot', 'png']
# If you need a PNG image with an HTML image map, use this:
# interwiki_graph_formats = ['png', 'cmap']
# If you only need SVG images, use:
# interwiki_graph_formats = ['svg']
interwiki_graph_formats = ['png']

# You can post the contents of your autonomous_problems.dat to the wiki,
# e.g. to https://de.wikipedia.org/wiki/Wikipedia:Interwiki-Konflikte .
# This allows others to assist you in resolving interwiki problems.
# To help these people, you can upload the interwiki graphs to your
# webspace somewhere. Set the base URL here, e.g.:
# 'https://www.example.org/~yourname/interwiki-graphs/'
interwiki_graph_url = None

# Save a file with local articles without interwikis.
without_interwiki = False

# Experimental feature:
# Store the page contents on disk (/cache/ directory) instead of loading
# them in RAM.
interwiki_contents_on_disk = False

# ############# SOLVE_DISAMBIGUATION SETTINGS ############
#
# Set disambiguation_comment[FAMILY][LANG] to a non-empty string to override
# the default edit comment for the solve_disambiguation bot.
# Use %s to represent the name of the disambiguation page being treated.
# Example:
#
# disambiguation_comment['wikipedia']['en'] = \
#    "Robot-assisted disambiguation ([[WP:DPL|you can help!]]): %s"

# Sorting order for alternatives. Set to True to ignore case when sorting.
sort_ignore_case = False

# ############# IMAGE RELATED SETTINGS ##############
# If you set this to True, images will be uploaded to Wikimedia
# Commons by default.
upload_to_commons = False

# ############# SETTINGS TO AVOID SERVER OVERLOAD ##############

# Slow down the robot such that it never requests a second page within
# 'minthrottle' seconds. This can be lengthened if the server is slow,
# but never more than 'maxthrottle' seconds. However, if you are running
# more than one bot in parallel, the times are lengthened.
# By default, the get_throttle is turned off, and 'maxlag' is used to
# control the rate of server access. Set minthrottle to non-zero to use a
# throttle on read access.
minthrottle = 0
maxthrottle = 60

# Slow down the robot such that it never makes a second page edit within
# 'put_throttle' seconds.
put_throttle = 10

# Sometimes you want to know when a delay is inserted. If a delay is larger
# than 'noisysleep' seconds, it is logged on the screen.
noisysleep = 3.0

# Defer bot edits during periods of database server lag. For details, see
# https://www.mediawiki.org/wiki/Maxlag_parameter
# You can set this variable to a number of seconds, or to None (or 0) to
# disable this behavior. Higher values are more aggressive in seeking
# access to the wiki.
# Non-Wikimedia wikis may or may not support this feature; for families
# that do not use it, it is recommended to set minthrottle (above) to
# at least 1 second.
maxlag = 5

# Maximum number of pages that can be retrieved at one time from the wiki
# server. -1 indicates that the limit is set by the API restriction.
step = -1

# Maximum number of times to retry an API request before quitting.
max_retries = 15
# Minimum time to wait before resubmitting a failed API request.
retry_wait = 5

# ############# TABLE CONVERSION BOT SETTINGS ##############

# Split long paragraphs for better readability of the source.
# Only table2wiki.py uses it for now.
splitLongParagraphs = False
# Sometimes HTML tables are indented for better readability. That can
# produce very ugly results, so the indentation is removed.
deIndentTables = True

# ############# WEBLINK CHECKER SETTINGS ##############

# How many external links should weblinkchecker.py check at the same time?
# If you have a fast connection, you might want to increase this number so
# that slow servers won't slow you down.
max_external_links = 50

report_dead_links_on_talk = False

# Don't report links that have been dead for this many days or fewer.
weblink_dead_days = 7

# ############# DATABASE SETTINGS ##############
# Settings to connect to the wiki's database or to a replica of it.
# db_name_format can be used to manipulate the dbName of the site.
# Example for a pywikibot running on wmflabs:
# db_hostname = 'enwiki.labsdb'
# db_name_format = '{0}_p'
# db_connect_file = user_home_path('replica.my.cnf')
db_hostname = 'localhost'
db_username = ''
db_password = ''
db_name_format = '{0}'
db_connect_file = user_home_path('.my.cnf')
# Local port for the MySQL server, e.g. when tunnelling:
# ssh -L 4711:enwiki.labsdb:3306 user@tools-login.wmflabs.org
db_port = 3306
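# A sketch of using the ssh tunnel above: with local port 4711 forwarded to
# enwiki.labsdb:3306, you would point pywikibot at the local end of the
# tunnel:
# db_hostname = 'localhost'
# db_port = 4711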

# ############# SEARCH ENGINE SETTINGS ##############

# Yahoo! Search Web Services are not operational.
# See https://phabricator.wikimedia.org/T106085
yahoo_appid = ''

# To use the Windows Live Search web service you must get an AppID from
# http://www.bing.com/dev/en-us/dev-center
msn_appid = ''

# ############# FLICKR RIPPER SETTINGS ##############

# Using the Flickr API.
flickr = {
    'api_key': u'',  # Provide your key!
    'api_secret': u'',  # API secret of your key (optional)
    'review': False,  # Should we automatically mark our uploads as reviewed?
    'reviewer': u'',  # If so, under what reviewer name?
}
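
# A hypothetical filled-in example (the key and names below are placeholders,
# not real credentials):
# flickr = {
#     'api_key': u'0123456789abcdef',
#     'api_secret': u'',
#     'review': True,
#     'reviewer': u'Petike',
# }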

# ############# COPYRIGHT SETTINGS ##############

# Enable/disable search engines in the copyright.py script.
copyright_google = True
copyright_yahoo = True
copyright_msn = False

# Perform a deep check, loading URLs to search whether 'Wikipedia' is present.
# This may be useful to increase the number of correct results. If you don't
# have a fast connection, you might want to keep these disabled.
copyright_check_in_source_google = False
copyright_check_in_source_yahoo = False
copyright_check_in_source_msn = False

# Web pages may contain a Wikipedia text without the word 'Wikipedia' but with
# the typical '[edit]' tag as a result of a copy & paste procedure. You don't
# want reports for these kinds of URLs, even if they are copyright violations.
# However, when enabled, these URLs are logged in a file.
copyright_check_in_source_section_names = False

# Limit the number of queries per page.
copyright_max_query_for_page = 25

# Skip a specified number of queries.
copyright_skip_query = 0

# Number of attempts on connection error.
copyright_connection_tries = 10

# Behavior if a query-limit-exceeded error occurs.
#
# Possibilities:
#
#    0 = None
#    1 = Disable search engine
#    2 = Sleep (default)
#    3 = Stop
copyright_exceeded_in_queries = 2
copyright_exceeded_in_queries_sleep_hours = 6

# Append the last modified date of the URL to the script result.
copyright_show_date = True

# Append the length of the URL to the script result.
copyright_show_length = True

# By default the script tries to identify and skip text that contains a large
# comma-separated list or only numbers. But sometimes that might be the only
# unmodified part of a slightly edited and not otherwise reported copyright
# violation. You can disable this feature to try to increase the number of
# results.
copyright_economize_query = True

# ############# HTTP SETTINGS ##############
# Use a persistent HTTP connection. An HTTP connection has to be established
# only once per site object, making stuff a whole lot faster. Do NOT EVER
# use this if you share Site objects across threads without proper locking.
#
# DISABLED FUNCTION. Setting this variable will not have any effect.
persistent_http = False

# Default socket timeout in seconds.
# Do NOT set this to None to disable timeouts; that may freeze your script.
# You may assign a single value used for both the connection and the read
# timeout, or a tuple of two int or float values to set them separately
# (tuple support requires requests 2.4.0 or later).
socket_timeout = (6.05, 45)
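
# For instance (a sketch of the two accepted forms):
# socket_timeout = 30          # one value for both connect and read
# socket_timeout = (6.05, 45)  # (connect timeout, read timeout)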


# ############# COSMETIC CHANGES SETTINGS ##############
# The bot can make some additional changes to each page it edits, e.g. fix
# whitespace or positioning of interwiki and category links.

# This is an experimental feature; handle with care and consider re-checking
# each bot edit if enabling this!
cosmetic_changes = False

# If cosmetic changes are switched on, and you also have several accounts at
# projects where you're not familiar with the local conventions, you probably
# only want the bot to do cosmetic changes on your "home" wiki, which you
# specified in config.mylang and config.family.
# If you want the bot to also do cosmetic changes when editing a page on a
# foreign wiki, set cosmetic_changes_mylang_only to False, but be careful!
cosmetic_changes_mylang_only = True

# The dictionary cosmetic_changes_enable should contain a tuple of languages
# for each site where you wish to enable cosmetic changes, in addition to
# your own language (if cosmetic_changes_mylang_only is set).
# Please set your dictionary by adding such lines to your user-config.py:
# cosmetic_changes_enable['wikipedia'] = ('de', 'en', 'fr')
cosmetic_changes_enable = {}

# The dictionary cosmetic_changes_disable should contain a tuple of languages
# for each site where you wish to disable cosmetic changes. You may use it
# when cosmetic_changes_mylang_only is False, but you can also disable your
# own language. This also overrides the settings in the cosmetic_changes_enable
# dictionary. Please set your dict by adding such lines to your user-config.py:
# cosmetic_changes_disable['wikipedia'] = ('de', 'en', 'fr')
cosmetic_changes_disable = {}

# cosmetic_changes_deny_script is a list of scripts for which cosmetic changes
# are disabled. You may add additional scripts by appending script names in
# your user-config.py (the "+=" operator is strongly recommended):
# cosmetic_changes_deny_script += ['your_script_name_1', 'your_script_name_2']
# Appending the script name also works:
# cosmetic_changes_deny_script.append('your_script_name')
cosmetic_changes_deny_script = ['category_redirect', 'cosmetic_changes',
                                'newitem', 'touch']

# ############# REPLICATION BOT ################
# You can add replicate_replace to your user-config.py, which has the
# following format:
#
# replicate_replace = {
#     'wikipedia:li': {'Hoofdpagina': 'Veurblaad'}
# }
#
# to replace all occurrences of 'Hoofdpagina' with 'Veurblaad' when writing to
# liwiki. Note that this does not take the origin wiki into account.
replicate_replace = {}

# ############# FURTHER SETTINGS ##############

# Proxy configuration

# TODO: proxy support
proxy = None

# Simulate settings

# Defines what additional actions the bots are NOT allowed to do (e.g. 'edit')
# on the wiki server. Allows simulation runs of bots to be carried out without
# changing any page on the server side. Use this setting to add more actions
# in user-config.py for wikis with extra write actions. A sketch follows
# after the 'simulate' variable below.
actions_to_block = []

# Set simulate to True or use the -simulate option to block all actions
# given above.
simulate = False
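
# For example (a hypothetical dry-run setup; 'edit' is the sample action
# mentioned above):
# actions_to_block = ['edit']
# simulate = True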

# How many pages should be put into a queue in asynchronous mode.
# If maxsize is <= 0, the queue size is infinite.
# Increasing this value will use more memory but could speed up
# processing. The higher the value, the smaller this effect becomes.
max_queue_size = 64

# Define the line separator. Pages retrieved via the API have "\n", whereas
# pages fetched from screen (mostly) have "\r\n". Interwiki and category
# separator settings in family files should use multiples of this.
# LS is a shortcut alias.
line_separator = LS = u'\n'

# Settings to enable mwparserfromhell
# <https://mwparserfromhell.readthedocs.org/en/latest/>
# Currently used in textlib.extract_templates_and_params.
# This is more accurate than our current regex, but only works
# if the user has already installed the library.
use_mwparserfromhell = True

# Pickle protocol version to use for storing dumps.
# This config variable is not used for loading dumps.
# Version 2 is common to both Python 2 and 3, and should
# be used when dumps are accessed by both versions.
# Version 4 is only available for Python 3.4 and later.
pickle_protocol = 2

# End of configuration section