Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions DIRECTORY.md
Original file line number Diff line number Diff line change
Expand Up @@ -899,6 +899,7 @@
* [N Body Simulation](physics/n_body_simulation.py)
* [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py)
* [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py)
* [Orbital Transfer Work](physics/orbital_transfer_work.py)
* [Period Of Pendulum](physics/period_of_pendulum.py)
* [Photoelectric Effect](physics/photoelectric_effect.py)
* [Potential Energy](physics/potential_energy.py)
Expand Down
12 changes: 10 additions & 2 deletions machine_learning/linear_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,24 @@
Rating). We try to best fit a line through dataset and estimate the parameters.
"""

# /// script
# requires-python = ">=3.13"
# dependencies = [
# "httpx",
# "numpy",
# ]
# ///

import httpx
import numpy as np
import requests


def collect_dataset():
"""Collect dataset of CSGO
The dataset contains ADR vs Rating of a Player
:return : dataset obtained from the link, as matrix
"""
response = requests.get(
response = httpx.get(
"https://github.com/raw/yashLadha/The_Math_of_Intelligence/"
"master/Week1/ADRvsRating.csv",
timeout=10,
Expand Down
8 changes: 4 additions & 4 deletions physics/speeds_of_gas_molecules.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,9 @@ def avg_speed_of_molecule(temperature: float, molar_mass: float) -> float:
Examples:

>>> avg_speed_of_molecule(273, 0.028) # nitrogen at 273 K
454.3488755020387
454.3488755062257
>>> avg_speed_of_molecule(300, 0.032) # oxygen at 300 K
445.52572733919885
445.5257273433045
>>> avg_speed_of_molecule(-273, 0.028) # invalid temperature
Traceback (most recent call last):
...
Expand All @@ -87,9 +87,9 @@ def mps_speed_of_molecule(temperature: float, molar_mass: float) -> float:
Examples:

>>> mps_speed_of_molecule(273, 0.028) # nitrogen at 273 K
402.65620701908966
402.65620702280023
>>> mps_speed_of_molecule(300, 0.032) # oxygen at 300 K
394.836895549922
394.8368955535605
>>> mps_speed_of_molecule(-273, 0.028) # invalid temperature
Traceback (most recent call last):
...
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ classifiers = [
dependencies = [
"beautifulsoup4>=4.12.3",
"fake-useragent>=1.5.1",
"httpx>=0.28.1",
"imageio>=2.36.1",
"keras>=3.7",
"lxml>=5.3",
Expand All @@ -19,7 +20,6 @@ dependencies = [
"opencv-python>=4.10.0.84",
"pandas>=2.2.3",
"pillow>=11",
"requests>=2.32.3",
"rich>=13.9.4",
"scikit-learn>=1.5.2",
"sphinx-pyproject>=0.3",
Expand All @@ -42,8 +42,8 @@ docs = [
"sphinx-pyproject>=0.3",
]
euler-validate = [
"httpx>=0.28.1",
"numpy>=2.1.3",
"requests>=2.32.3",
]

[tool.ruff]
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
beautifulsoup4
fake-useragent
httpx
imageio
keras
lxml
Expand All @@ -8,7 +9,6 @@ numpy
opencv-python
pandas
pillow
requests
rich
scikit-learn
sphinx-pyproject
Expand Down
6 changes: 3 additions & 3 deletions scripts/validate_solutions.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "httpx",
# "pytest",
# "requests",
# ]
# ///

Expand All @@ -15,8 +15,8 @@
import pathlib
from types import ModuleType

import httpx
import pytest
import requests

PROJECT_EULER_DIR_PATH = pathlib.Path.cwd().joinpath("project_euler")
PROJECT_EULER_ANSWERS_PATH = pathlib.Path.cwd().joinpath(
Expand Down Expand Up @@ -66,7 +66,7 @@ def added_solution_file_path() -> list[pathlib.Path]:
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + os.environ["GITHUB_TOKEN"],
}
files = requests.get(get_files_url(), headers=headers, timeout=10).json()
files = httpx.get(get_files_url(), headers=headers, timeout=10).json()
for file in files:
filepath = pathlib.Path.cwd().joinpath(file["filename"])
if (
Expand Down
1,108 changes: 582 additions & 526 deletions uv.lock

Large diffs are not rendered by default.

13 changes: 10 additions & 3 deletions web_programming/co2_emission.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,22 +2,29 @@
Get CO2 emission data from the UK CarbonIntensity API
"""

# /// script
# requires-python = ">=3.13"
# dependencies = [
# "httpx",
# ]
# ///

from datetime import date

import requests
import httpx

BASE_URL = "https://api.carbonintensity.org.uk/intensity"


# Emission in the last half hour
def fetch_last_half_hour() -> str:
    """Return the most recent half-hour CO2 intensity reading.

    Queries the UK CarbonIntensity API and returns the ``actual``
    intensity value of the latest data point.

    :return: the ``actual`` intensity figure from the newest entry.
    """
    # NOTE(review): a stale pre-migration `requests.get` duplicate of this
    # line was removed — it issued a second, discarded HTTP request.
    last_half_hour = httpx.get(BASE_URL, timeout=10).json()["data"][0]
    return last_half_hour["intensity"]["actual"]


# Emissions in a specific date range
def fetch_from_to(start, end) -> list:
    """Return CO2 intensity data points between two dates.

    :param start: range start — a value the API accepts in the URL path
        (e.g. a ``datetime.date`` or an ISO-8601 date string).
    :param end: range end, same accepted forms as ``start``.
    :return: the ``data`` list from the API's JSON response.
    """
    # NOTE(review): the stale pre-migration `requests.get` duplicate of this
    # return line was removed — only the httpx call is kept.
    return httpx.get(f"{BASE_URL}/{start}/{end}", timeout=10).json()["data"]


if __name__ == "__main__":
Expand Down
12 changes: 10 additions & 2 deletions web_programming/covid_stats_via_xpath.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,17 @@
more convenient to use in Python web projects (e.g. Django or Flask-based)
"""

# /// script
# requires-python = ">=3.13"
# dependencies = [
# "httpx",
# "lxml",
# ]
# ///

from typing import NamedTuple

import requests
import httpx
from lxml import html


Expand All @@ -19,7 +27,7 @@ class CovidData(NamedTuple):
def covid_stats(url: str = "https://www.worldometers.info/coronavirus/") -> CovidData:
    """Scrape the worldometers page and return its headline counters.

    Extracts the text of every ``maincounter-number`` span (presumably
    cases / deaths / recovered, in page order — confirm against the
    ``CovidData`` field names) and unpacks them into a ``CovidData``.

    :param url: page to scrape; defaults to the worldometers live page.
    :return: a ``CovidData`` built from the scraped counter strings.
    """
    xpath_str = '//div[@class = "maincounter-number"]/span/text()'
    # NOTE(review): a stale pre-migration `requests.get` duplicate of the
    # unpacking line was removed — it would have scraped the page twice and
    # passed twice as many positional values to CovidData.
    return CovidData(
        *html.fromstring(httpx.get(url, timeout=10).content).xpath(xpath_str)
    )


Expand Down
18 changes: 16 additions & 2 deletions web_programming/crawl_google_results.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,28 @@
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "fake-useragent",
# "httpx",
# ]
# ///

import sys
import webbrowser

import requests
import httpx
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

if __name__ == "__main__":
print("Googling.....")
url = "https://www.google.com/search?q=" + " ".join(sys.argv[1:])
res = requests.get(url, headers={"UserAgent": UserAgent().random}, timeout=10)
res = httpx.get(
url,
headers={"UserAgent": UserAgent().random},
timeout=10,
follow_redirects=True,
)
# res.raise_for_status()
with open("project1a.html", "wb") as out_file: # only for knowing the class
for data in res.iter_content(10000):
Expand Down
12 changes: 10 additions & 2 deletions web_programming/crawl_google_scholar_citation.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,15 @@
using title and year of publication, and volume and pages of journal.
"""

import requests
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "httpx",
# ]
# ///

import httpx
from bs4 import BeautifulSoup


Expand All @@ -12,7 +20,7 @@ def get_citation(base_url: str, params: dict) -> str:
Return the citation number.
"""
soup = BeautifulSoup(
requests.get(base_url, params=params, timeout=10).content, "html.parser"
httpx.get(base_url, params=params, timeout=10).content, "html.parser"
)
div = soup.find("div", attrs={"class": "gs_ri"})
anchors = div.find("div", attrs={"class": "gs_fl"}).find_all("a")
Expand Down
11 changes: 9 additions & 2 deletions web_programming/currency_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,16 @@
https://www.amdoren.com
"""

# /// script
# requires-python = ">=3.13"
# dependencies = [
# "httpx",
# ]
# ///

import os

import requests
import httpx

URL_BASE = "https://www.amdoren.com/api/currency.php"

Expand Down Expand Up @@ -176,7 +183,7 @@ def convert_currency(
params = locals()
# from is a reserved keyword
params["from"] = params.pop("from_")
res = requests.get(URL_BASE, params=params, timeout=10).json()
res = httpx.get(URL_BASE, params=params, timeout=10).json()
return str(res["amount"]) if res["error"] == 0 else res["error_message"]


Expand Down
14 changes: 11 additions & 3 deletions web_programming/current_stock_price.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
import requests
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "httpx",
# ]
# ///

import httpx
from bs4 import BeautifulSoup

"""
Expand All @@ -20,8 +28,8 @@ def stock_price(symbol: str = "AAPL") -> str:
True
"""
url = f"https://finance.yahoo.com/quote/{symbol}?p={symbol}"
yahoo_finance_source = requests.get(
url, headers={"USER-AGENT": "Mozilla/5.0"}, timeout=10
yahoo_finance_source = httpx.get(
url, headers={"USER-AGENT": "Mozilla/5.0"}, timeout=10, follow_redirects=True
).text
soup = BeautifulSoup(yahoo_finance_source, "html.parser")

Expand Down
13 changes: 10 additions & 3 deletions web_programming/current_weather.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,11 @@
import requests
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "httpx",
# ]
# ///

import httpx

# Put your API key(s) here
OPENWEATHERMAP_API_KEY = ""
Expand All @@ -19,13 +26,13 @@ def current_weather(location: str) -> list[dict]:
weather_data = []
if OPENWEATHERMAP_API_KEY:
params_openweathermap = {"q": location, "appid": OPENWEATHERMAP_API_KEY}
response_openweathermap = requests.get(
response_openweathermap = httpx.get(
OPENWEATHERMAP_URL_BASE, params=params_openweathermap, timeout=10
)
weather_data.append({"OpenWeatherMap": response_openweathermap.json()})
if WEATHERSTACK_API_KEY:
params_weatherstack = {"query": location, "access_key": WEATHERSTACK_API_KEY}
response_weatherstack = requests.get(
response_weatherstack = httpx.get(
WEATHERSTACK_URL_BASE, params=params_weatherstack, timeout=10
)
weather_data.append({"Weatherstack": response_weatherstack.json()})
Expand Down
12 changes: 10 additions & 2 deletions web_programming/daily_horoscope.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
import requests
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "httpx",
# ]
# ///

import httpx
from bs4 import BeautifulSoup


Expand All @@ -7,7 +15,7 @@ def horoscope(zodiac_sign: int, day: str) -> str:
"https://www.horoscope.com/us/horoscopes/general/"
f"horoscope-general-daily-{day}.aspx?sign={zodiac_sign}"
)
soup = BeautifulSoup(requests.get(url, timeout=10).content, "html.parser")
soup = BeautifulSoup(httpx.get(url, timeout=10).content, "html.parser")
return soup.find("div", class_="main-horoscope").p.text


Expand Down
12 changes: 10 additions & 2 deletions web_programming/download_images_from_google_query.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,18 @@
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "httpx",
# ]
# ///

import json
import os
import re
import sys
import urllib.request

import requests
import httpx
from bs4 import BeautifulSoup

headers = {
Expand Down Expand Up @@ -39,7 +47,7 @@ def download_images_from_google_query(query: str = "dhaka", max_images: int = 5)
"ijn": "0",
}

html = requests.get(
html = httpx.get(
"https://www.google.com/search", params=params, headers=headers, timeout=10
)
soup = BeautifulSoup(html.text, "html.parser")
Expand Down
Loading