Skip to content

Commit 2702bf9

Browse files
Enable ruff S113 rule (TheAlgorithms#11375)
* Enable ruff S113 rule * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 7b88e15 commit 2702bf9

36 files changed

+68
-46
lines changed

machine_learning/linear_regression.py

Lines changed: 2 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -19,7 +19,8 @@ def collect_dataset():
1919
"""
2020
response = requests.get(
2121
"https://raw.githubusercontent.com/yashLadha/The_Math_of_Intelligence/"
22-
"master/Week1/ADRvsRating.csv"
22+
"master/Week1/ADRvsRating.csv",
23+
timeout=10,
2324
)
2425
lines = response.text.splitlines()
2526
data = []

pyproject.toml

Lines changed: 0 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -14,7 +14,6 @@ lint.ignore = [ # `ruff rule S101` for a description of that rule
1414
"RUF002", # Docstring contains ambiguous {}. Did you mean {}?
1515
"RUF003", # Comment contains ambiguous {}. Did you mean {}?
1616
"S101", # Use of `assert` detected -- DO NOT FIX
17-
"S113", # Probable use of requests call without timeout -- FIX ME
1817
"S311", # Standard pseudo-random generators are not suitable for cryptographic purposes -- FIX ME
1918
"SLF001", # Private member accessed: `_Iterator` -- FIX ME
2019
"UP038", # Use `X | Y` in `{}` call instead of `(X, Y)` -- DO NOT FIX

scripts/validate_solutions.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -57,7 +57,7 @@ def added_solution_file_path() -> list[pathlib.Path]:
5757
"Accept": "application/vnd.github.v3+json",
5858
"Authorization": "token " + os.environ["GITHUB_TOKEN"],
5959
}
60-
files = requests.get(get_files_url(), headers=headers).json()
60+
files = requests.get(get_files_url(), headers=headers, timeout=10).json()
6161
for file in files:
6262
filepath = pathlib.Path.cwd().joinpath(file["filename"])
6363
if (

web_programming/co2_emission.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -11,13 +11,13 @@
1111

1212
# Emission in the last half hour
1313
def fetch_last_half_hour() -> str:
14-
last_half_hour = requests.get(BASE_URL).json()["data"][0]
14+
last_half_hour = requests.get(BASE_URL, timeout=10).json()["data"][0]
1515
return last_half_hour["intensity"]["actual"]
1616

1717

1818
# Emissions in a specific date range
1919
def fetch_from_to(start, end) -> list:
20-
return requests.get(f"{BASE_URL}/{start}/{end}").json()["data"]
20+
return requests.get(f"{BASE_URL}/{start}/{end}", timeout=10).json()["data"]
2121

2222

2323
if __name__ == "__main__":

web_programming/covid_stats_via_xpath.py

Lines changed: 3 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -18,7 +18,9 @@ class CovidData(NamedTuple):
1818

1919
def covid_stats(url: str = "https://www.worldometers.info/coronavirus/") -> CovidData:
2020
xpath_str = '//div[@class = "maincounter-number"]/span/text()'
21-
return CovidData(*html.fromstring(requests.get(url).content).xpath(xpath_str))
21+
return CovidData(
22+
*html.fromstring(requests.get(url, timeout=10).content).xpath(xpath_str)
23+
)
2224

2325

2426
fmt = """Total COVID-19 cases in the world: {}

web_programming/crawl_google_results.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -8,7 +8,7 @@
88
if __name__ == "__main__":
99
print("Googling.....")
1010
url = "https://www.google.com/search?q=" + " ".join(sys.argv[1:])
11-
res = requests.get(url, headers={"UserAgent": UserAgent().random})
11+
res = requests.get(url, headers={"UserAgent": UserAgent().random}, timeout=10)
1212
# res.raise_for_status()
1313
with open("project1a.html", "wb") as out_file: # only for knowing the class
1414
for data in res.iter_content(10000):

web_programming/crawl_google_scholar_citation.py

Lines changed: 3 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -11,7 +11,9 @@ def get_citation(base_url: str, params: dict) -> str:
1111
"""
1212
Return the citation number.
1313
"""
14-
soup = BeautifulSoup(requests.get(base_url, params=params).content, "html.parser")
14+
soup = BeautifulSoup(
15+
requests.get(base_url, params=params, timeout=10).content, "html.parser"
16+
)
1517
div = soup.find("div", attrs={"class": "gs_ri"})
1618
anchors = div.find("div", attrs={"class": "gs_fl"}).find_all("a")
1719
return anchors[2].get_text()

web_programming/currency_converter.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -176,7 +176,7 @@ def convert_currency(
176176
params = locals()
177177
# from is a reserved keyword
178178
params["from"] = params.pop("from_")
179-
res = requests.get(URL_BASE, params=params).json()
179+
res = requests.get(URL_BASE, params=params, timeout=10).json()
180180
return str(res["amount"]) if res["error"] == 0 else res["error_message"]
181181

182182

web_programming/current_stock_price.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,9 @@
44

55
def stock_price(symbol: str = "AAPL") -> str:
66
url = f"https://finance.yahoo.com/quote/{symbol}?p={symbol}"
7-
yahoo_finance_source = requests.get(url, headers={"USER-AGENT": "Mozilla/5.0"}).text
7+
yahoo_finance_source = requests.get(
8+
url, headers={"USER-AGENT": "Mozilla/5.0"}, timeout=10
9+
).text
810
soup = BeautifulSoup(yahoo_finance_source, "html.parser")
911
specific_fin_streamer_tag = soup.find("fin-streamer", {"data-test": "qsp-price"})
1012

web_programming/current_weather.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -20,13 +20,13 @@ def current_weather(location: str) -> list[dict]:
2020
if OPENWEATHERMAP_API_KEY:
2121
params_openweathermap = {"q": location, "appid": OPENWEATHERMAP_API_KEY}
2222
response_openweathermap = requests.get(
23-
OPENWEATHERMAP_URL_BASE, params=params_openweathermap
23+
OPENWEATHERMAP_URL_BASE, params=params_openweathermap, timeout=10
2424
)
2525
weather_data.append({"OpenWeatherMap": response_openweathermap.json()})
2626
if WEATHERSTACK_API_KEY:
2727
params_weatherstack = {"query": location, "access_key": WEATHERSTACK_API_KEY}
2828
response_weatherstack = requests.get(
29-
WEATHERSTACK_URL_BASE, params=params_weatherstack
29+
WEATHERSTACK_URL_BASE, params=params_weatherstack, timeout=10
3030
)
3131
weather_data.append({"Weatherstack": response_weatherstack.json()})
3232
if not weather_data:

0 commit comments

Comments (0)