Modalities: Tabular, Text
Formats: json
Size: < 1K rows
Libraries: Datasets, Dask
File size: 12,611 bytes
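
The dataset can be loaded with the Datasets library listed above. A minimal sketch, assuming a default train split; the repository ID below is a placeholder, not the actual dataset path:

    from datasets import load_dataset

    # Placeholder repository ID; substitute the actual dataset path.
    ds = load_dataset("your-org/your-dataset", split="train")

    # Each row pairs a failing commit with the commit that fixed the CI check.
    row = ds[0]
    print(row["repo_owner"], row["repo_name"], row["sha_fail"], row["sha_success"])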
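A sample record is reproduced below. Each record captures a real CI failure and its repair: the repository and contributor, the failing commit (sha_fail) and the commit at which the check passed (sha_success), the GitHub Actions workflow definition, the log of the failing step (here, a pre-commit run in which the black hook reformatted scrapy/cmdline.py), and the diff that fixed it.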
{"language": "Python", "id": 16, "repo_owner": "scrapy", "repo_name": "scrapy", "head_branch": "master", "workflow_name": "Checks", "workflow_filename": "checks.yml", "workflow_path": ".github/workflows/checks.yml", "contributor": "pyvadev", "sha_fail": "cdfe3ca519dc8d1c3add855c05bb8f56ef25ae84", "sha_success": "731f7495563591f1b76b1b2ae83f07d2239b7897", "workflow": "name: Checks\non: [push, pull_request]\n\nconcurrency:\n  group: ${{github.workflow}}-${{ github.ref }}\n  cancel-in-progress: true\n\njobs:\n  checks:\n    runs-on: ubuntu-latest\n    strategy:\n      fail-fast: false\n      matrix:\n        include:\n        - python-version: \"3.12\"\n          env:\n            TOXENV: pylint\n        - python-version: 3.8\n          env:\n            TOXENV: typing\n        - python-version: \"3.11\"  # Keep in sync with .readthedocs.yml\n          env:\n            TOXENV: docs\n        - python-version: \"3.12\"\n          env:\n            TOXENV: twinecheck\n\n    steps:\n    - uses: actions/checkout@v4\n\n    - name: Set up Python ${{ matrix.python-version }}\n      uses: actions/setup-python@v4\n      with:\n        python-version: ${{ matrix.python-version }}\n\n    - name: Run check\n      env: ${{ matrix.env }}\n      run: |\n        pip install -U tox\n        tox\n\n  pre-commit:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "pre-commit/3_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n  extra_args: --all-files\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nDefaulting to user installation because normal site-packages is not writeable\nCollecting pre-commit\n  Downloading pre_commit-3.5.0-py2.py3-none-any.whl (203 kB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 203.7/203.7 KB 5.6 MB/s eta 0:00:00\nCollecting virtualenv>=20.10.0\n  Downloading virtualenv-20.24.7-py3-none-any.whl (3.8 MB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 16.4 MB/s eta 0:00:00\nRequirement already satisfied: pyyaml>=5.1 in /usr/lib/python3/dist-packages (from pre-commit) (5.4.1)\nCollecting identify>=1.0.0\n  Downloading identify-2.5.32-py2.py3-none-any.whl (98 kB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 KB 30.7 MB/s eta 0:00:00\nCollecting cfgv>=2.0.0\n  Downloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nCollecting nodeenv>=0.11.1\n  Downloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nRequirement already satisfied: setuptools in /usr/lib/python3/dist-packages (from nodeenv>=0.11.1->pre-commit) (59.6.0)\nCollecting platformdirs<5,>=3.9.1\n  Downloading platformdirs-4.0.0-py3-none-any.whl (17 kB)\nCollecting filelock<4,>=3.12.2\n  Downloading filelock-3.13.1-py3-none-any.whl (11 kB)\nCollecting distlib<1,>=0.3.7\n 
 Downloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 KB 28.3 MB/s eta 0:00:00\nInstalling collected packages: distlib, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.7 filelock-3.13.1 identify-2.5.32 nodeenv-1.8.0 platformdirs-4.0.0 pre-commit-3.5.0 virtualenv-20.24.7\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nargcomplete==3.1.6\ncfgv==3.4.0\ndistlib==0.3.7\nfilelock==3.13.1\nidentify==2.5.32\nnodeenv==1.8.0\npackaging==23.2\npipx==1.2.1\nplatformdirs==4.0.0\npre-commit==3.5.0\nuserpath==1.9.1\nvirtualenv==20.24.7\n##[group]Run actions/cache@v3\nwith:\n  path: ~/.cache/pre-commit\n  key: pre-commit-3||7a8fe885594aed9a90fd5938b4bb49b65732538a44c08aad3d6ea69d9d0cf64c\n  enableCrossOsArchive: false\n  fail-on-cache-miss: false\n  lookup-only: false\n##[endgroup]\nCache Size: ~33 MB (34938818 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/a467b59a-7374-4fce-b8ee-234d26fbaca7/cache.tzst -P -C /home/runner/work/scrapy/scrapy --use-compress-program unzstd\nReceived 34938818 of 34938818 (100.0%), 33.3 MBs/sec\nCache restored successfully\nCache restored from key: pre-commit-3||7a8fe885594aed9a90fd5938b4bb49b65732538a44c08aad3d6ea69d9d0cf64c\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nbandit...................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[42mPassed\u001b[m\nblack....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: black\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n\u001b[1mreformatted scrapy/cmdline.py\u001b[0m\n\n\u001b[1mAll done! 
\u2728 \ud83c\udf70 \u2728\u001b[0m\n\u001b[34m\u001b[1m1 file \u001b[0m\u001b[1mreformatted\u001b[0m, \u001b[34m340 files \u001b[0mleft unchanged.\n\nisort....................................................................\u001b[42mPassed\u001b[m\nblacken-docs.............................................................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/scrapy/cmdline.py b/scrapy/cmdline.py\u001b[m\n\u001b[1mindex 761f9df..69e4f79 100644\u001b[m\n\u001b[1m--- a/scrapy/cmdline.py\u001b[m\n\u001b[1m+++ b/scrapy/cmdline.py\u001b[m\n\u001b[36m@@ -72,7 +72,7 @@\u001b[m \u001b[mdef _get_commands_dict(settings, inproject):\u001b[m\n \u001b[m\n def _pop_command_name(argv):\u001b[m\n     for i, v in enumerate(argv):\u001b[m\n\u001b[31m-        if v.startswith('-'):\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        if v.startswith(\"-\"):\u001b[m\n             continue\u001b[m\n         return argv.pop(i)\u001b[m\n \u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/docs/topics/request-response.rst b/docs/topics/request-response.rst\nindex adf3d0f4a..8edf710bc 100644\n--- a/docs/topics/request-response.rst\n+++ b/docs/topics/request-response.rst\n@@ -193,18 +193,47 @@ Request objects\n         :meth:`replace`.\n \n     .. attribute:: Request.meta\n-\n-        A dict that contains arbitrary metadata for this request. This dict is\n-        empty for new Requests, and is usually  populated by different Scrapy\n-        components (extensions, middlewares, etc). So the data contained in this\n-        dict depends on the extensions you have enabled.\n-\n-        See :ref:`topics-request-meta` for a list of special meta keys\n-        recognized by Scrapy.\n-\n-        This dict is :doc:`shallow copied <library/copy>` when the request is\n-        cloned using the ``copy()`` or ``replace()`` methods, and can also be\n-        accessed, in your spider, from the ``response.meta`` attribute.\n+       :value: {}\n+\n+        A dictionary of arbitrary metadata for the request.\n+\n+        You may extend request metadata as you see fit.\n+\n+        Request metadata can also be accessed through the\n+        :attr:`~scrapy.http.Response.meta` attribute of a response.\n+\n+        To pass data from one spider callback to another, consider using\n+        :attr:`cb_kwargs` instead. However, request metadata may be the right\n+        choice in certain scenarios, such as to maintain some debugging data\n+        across all follow-up requests (e.g. the source URL).\n+\n+        A common use of request metadata is to define request-specific\n+        parameters for Scrapy components (extensions, middlewares, etc.). For\n+        example, if you set ``dont_retry`` to ``True``,\n+        :class:`~scrapy.downloadermiddlewares.retry.RetryMiddleware` will never\n+        retry that request, even if it fails. 
See :ref:`topics-request-meta`.\n+\n+        You may also use request metadata in your custom Scrapy components, for\n+        example, to keep request state information relevant to your component.\n+        For example,\n+        :class:`~scrapy.downloadermiddlewares.retry.RetryMiddleware` uses the\n+        ``retry_times`` metadata key to keep track of how many times a request\n+        has been retried so far.\n+\n+        Copying all the metadata of a previous request into a new, follow-up\n+        request in a spider callback is a bad practice, because request\n+        metadata may include metadata set by Scrapy components that is not\n+        meant to be copied into other requests. For example, copying the\n+        ``retry_times`` metadata key into follow-up requests can lower the\n+        amount of retries allowed for those follow-up requests.\n+\n+        You should only copy all request metadata from one request to another\n+        if the new request is meant to replace the old request, as is often the\n+        case when returning a request from a :ref:`downloader middleware\n+        <topics-downloader-middleware>` method.\n+\n+        Also mind that the :meth:`copy` and :meth:`replace` request methods\n+        :doc:`shallow-copy <library/copy>` request metadata.\n \n     .. attribute:: Request.cb_kwargs\n \ndiff --git a/scrapy/cmdline.py b/scrapy/cmdline.py\nindex 761f9df83..6580ba9ce 100644\n--- a/scrapy/cmdline.py\n+++ b/scrapy/cmdline.py\n@@ -71,10 +71,12 @@ def _get_commands_dict(settings, inproject):\n \n \n def _pop_command_name(argv):\n-    for i, v in enumerate(argv):\n-        if v.startswith('-'):\n-            continue\n-        return argv.pop(i)\n+    i = 0\n+    for arg in argv[1:]:\n+        if not arg.startswith(\"-\"):\n+            del argv[i]\n+            return arg\n+        i += 1\n \n \n def _print_header(settings, inproject):\n@@ -86,11 +88,12 @@ def _print_header(settings, inproject):\n         print(f\"Scrapy {version} - no active project\\n\")\n \n \n-def _print_commands(settings, inproject, cmds):\n+def _print_commands(settings, inproject):\n     _print_header(settings, inproject)\n     print(\"Usage:\")\n     print(\"  scrapy <command> [options] [args]\\n\")\n     print(\"Available commands:\")\n+    cmds = _get_commands_dict(settings, inproject)\n     for cmdname, cmdclass in sorted(cmds.items()):\n         print(f\"  {cmdname:<13} {cmdclass.short_desc()}\")\n     if not inproject:\n@@ -106,7 +109,6 @@ def _print_unknown_command(settings, cmdname, inproject):\n     print('Use \"scrapy\" to see available commands')\n \n \n-# TODO: Confusion, can be improved.\n def _run_print_help(parser, func, *a, **kw):\n     try:\n         func(*a, **kw)\n@@ -134,9 +136,9 @@ def execute(argv=None, settings=None):\n \n     inproject = inside_project()\n     cmds = _get_commands_dict(settings, inproject)\n-    cmdname = _pop_command_name(argv[1:])\n+    cmdname = _pop_command_name(argv)\n     if not cmdname:\n-        _print_commands(settings, inproject, cmds)\n+        _print_commands(settings, inproject)\n         sys.exit(0)\n     elif cmdname not in cmds:\n         _print_unknown_command(settings, cmdname, inproject)\n@@ -152,7 +154,7 @@ def execute(argv=None, settings=None):\n     settings.setdict(cmd.default_settings, priority=\"command\")\n     cmd.settings = settings\n     cmd.add_options(parser)\n-    opts, args = parser.parse_known_args(argv[1:])\n+    opts, args = parser.parse_known_args(args=argv[1:])\n     _run_print_help(parser, 
cmd.process_options, args, opts)\n \n     cmd.crawler_process = CrawlerProcess(settings)\n", "difficulty": "1"}
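
A record like this can, in principle, be replayed end to end: check out sha_fail, confirm the check fails, apply the diff, and confirm it passes. The sketch below is illustrative rather than part of the dataset, and it assumes git and pre-commit are on PATH, network access to GitHub is available, and the hook versions pinned in the repository still resolve:

    import subprocess
    import tempfile
    from pathlib import Path

    def reproduce(record: dict) -> None:
        """Illustrative helper: replay one record's CI failure and fix locally."""
        workdir = Path(tempfile.mkdtemp())
        url = f"https://github.com/{record['repo_owner']}/{record['repo_name']}.git"
        subprocess.run(["git", "clone", url, str(workdir)], check=True)
        subprocess.run(["git", "checkout", record["sha_fail"]], cwd=workdir, check=True)

        # The failure should reproduce at sha_fail (pre-commit exits non-zero).
        before = subprocess.run(["pre-commit", "run", "--all-files"], cwd=workdir)
        assert before.returncode != 0, "expected the check to fail at sha_fail"

        # Hooks such as black may have rewritten files during the failing run;
        # restore the original failing state before applying the repair diff.
        subprocess.run(["git", "checkout", "--", "."], cwd=workdir, check=True)

        patch = workdir / "repair.patch"
        patch.write_text(record["diff"])
        subprocess.run(["git", "apply", str(patch)], cwd=workdir, check=True)

        # Stage the patched files so pre-commit runs against the repaired tree
        # rather than stashing them as unstaged changes.
        subprocess.run(["git", "add", "-A"], cwd=workdir, check=True)

        after = subprocess.run(["pre-commit", "run", "--all-files"], cwd=workdir)
        assert after.returncode == 0, "expected the check to pass after the diff"

For the row above, reproduce(row) would clone scrapy/scrapy, check out the failing commit, and verify that the black failure shown in the log disappears once the diff is applied.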