
Commit

docs: Fix changelog entry. Prepare test in case #529 advanced.
jpmckinney committed Jul 26, 2024
1 parent bf5e1bd commit 0055dd9
Showing 2 changed files with 12 additions and 10 deletions.
10 changes: 5 additions & 5 deletions docs/news.rst
@@ -133,11 +133,11 @@ Removed
- Remove the ``native_stringify_dict`` function.
- Remove undocumented and unused internal environment variables:

-   - ``SCRAPY_FEED_URI`` to ``SCRAPYD_FEED_URI``
-   - ``SCRAPY_JOB`` to ``SCRAPYD_JOB``
-   - ``SCRAPY_LOG_FILE`` to ``SCRAPYD_LOG_FILE``
-   - ``SCRAPY_SLOT`` to ``SCRAPYD_SLOT``
-   - ``SCRAPY_SPIDER`` to ``SCRAPYD_SPIDER``
+   - ``SCRAPYD_FEED_URI``
+   - ``SCRAPYD_JOB``
+   - ``SCRAPYD_LOG_FILE``
+   - ``SCRAPYD_SLOT``
+   - ``SCRAPYD_SPIDER``

1.4.3 (2023-09-25)
------------------
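The entries above name environment variables that Scrapyd kept internally for the crawl process. As a hedged illustration only (the variable names come from the changelog; the accessing code is not Scrapyd's), such a variable would have been read like any other process environment variable:

import os

# Illustrative only: before their removal, these internal variables were plain
# process environment variables, readable like any other.
job_id = os.environ.get("SCRAPYD_JOB")
log_file = os.environ.get("SCRAPYD_LOG_FILE")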
12 changes: 7 additions & 5 deletions tests/test_webservice.py
@@ -16,6 +16,8 @@
from scrapyd.webservice import spider_list
from tests import get_egg_data, get_finished_job, get_message, has_settings, root_add_version, touch

+ cliargs = [sys.executable, "-m", "scrapyd.runner", "crawl", "s2", "-s", "DOWNLOAD_DELAY=2", "-a", "arg1=val1"]
+
job1 = get_finished_job(
project="p1",
spider="s1",
@@ -27,7 +29,7 @@

@pytest.fixture()
def scrapy_process():
- process = ScrapyProcessProtocol(project="p1", spider="s1", job="j1", env={}, args=[])
+ process = ScrapyProcessProtocol(project="p1", spider="s1", job="j1", env={}, args=cliargs)
process.start_time = datetime.datetime(2001, 2, 3, 4, 5, 6, 9)
process.end_time = datetime.datetime(2001, 2, 3, 4, 5, 6, 10)
process.transport = MagicMock()
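The fixture is now built from cliargs, which mirrors a real crawl command line: "-s" passes a Scrapy setting and "-a" a spider argument, each as a KEY=value pair. Purely as an illustrative sketch (this is not Scrapyd's own argument handling), those pairs can be pulled back out of a list shaped like cliargs:

# Illustrative helper, not part of Scrapyd: collect the KEY=value pair that
# follows each occurrence of a flag such as "-s" or "-a".
def collect_pairs(flag, argv):
    pairs = (argv[i + 1] for i, token in enumerate(argv) if token == flag)
    return dict(pair.split("=", 1) for pair in pairs)

assert collect_pairs("-s", cliargs) == {"DOWNLOAD_DELAY": "2"}
assert collect_pairs("-a", cliargs) == {"arg1": "val1"}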
@@ -374,7 +376,7 @@ def test_list_jobs(txrequest, root, scrapy_process, args, exists, chdir):
_job="j1",
_version="0.1",
settings={"DOWNLOAD_DELAY=2": "TRACK=Cause = Time"},
- other="one",
+ arg1="val1",
)

expected["pending"].append(
@@ -384,7 +386,7 @@ def test_list_jobs(txrequest, root, scrapy_process, args, exists, chdir):
"spider": "s1",
"version": "0.1",
"settings": {"DOWNLOAD_DELAY=2": "TRACK=Cause = Time"},
"args": {"other": "one"},
"args": {"arg1": "val1"},
},
)
assert_content(txrequest, root, "GET", "listjobs", args, expected)
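For orientation, the expected dict assembled above follows the shape of a listjobs.json response. A rough sketch of such a response, using only fields visible in this hunk (elided fields are omitted and the surrounding structure is assumed rather than taken from the diff):

# Assumed response shape for GET /listjobs.json; the pending entry mirrors
# the dict appended in the test above.
listjobs_response = {
    "status": "ok",
    "pending": [
        {
            "project": "p1",
            "spider": "s1",
            "version": "0.1",
            "settings": {"DOWNLOAD_DELAY=2": "TRACK=Cause = Time"},
            "args": {"arg1": "val1"},
        }
    ],
    "running": [...],   # running and finished entries omitted from this sketch
    "finished": [...],
}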
@@ -584,7 +586,7 @@ def test_schedule_parameters(txrequest, root_with_egg):
b"jobid": [b"aaa"],
b"priority": [b"5"],
b"setting": [b"DOWNLOAD_DELAY=2", b"TRACK=Cause = Time"],
b"other": [b"one", b"two"],
b"arg1": [b"val1", b"val2"],
}
txrequest.method = "POST"
content = root_with_egg.children[b"schedule.json"].render(txrequest)
@@ -604,7 +606,7 @@ def test_schedule_parameters(txrequest, root_with_egg):
"DOWNLOAD_DELAY": "2",
"TRACK": "Cause = Time",
},
"other": "one", # users are encouraged in api.rst to open an issue if they want multiple values
"arg1": "val1", # users are encouraged in api.rst to open an issue if they want multiple values
}


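Outside the test harness, the same parameters correspond to a plain HTTP POST to the schedule.json endpoint. A minimal sketch using the requests library, assuming a Scrapyd instance on the default port and placeholder project/spider names; as the comment in the test notes, only one value of a repeated spider argument is kept:

import requests

# Placeholder project/spider names; a Scrapyd instance is assumed to be
# listening on the default http://localhost:6800.
response = requests.post(
    "http://localhost:6800/schedule.json",
    data={
        "project": "myproject",
        "spider": "somespider",
        "setting": ["DOWNLOAD_DELAY=2", "TRACK=Cause = Time"],  # repeatable
        "arg1": "val1",  # extra parameters are passed to the spider as arguments
    },
)
print(response.json())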
