Skip to content

Commit 63ec76b

Browse files
committed
Add beginner, intermediate, and advanced Python
scripts with comments on dependencies
1 parent b3d213f commit 63ec76b

22 files changed

+408
-0
lines changed

advanced/api_rate_limiter.py

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# api_rate_limiter.py
2+
import time
3+
from functools import wraps
4+
5+
def rate_limiter(max_calls, period):
    """Decorator limiting the wrapped function to *max_calls* invocations
    per sliding window of *period* seconds.

    When the limit is hit, the wrapper blocks (time.sleep) until the oldest
    recorded call ages out of the window, then proceeds.

    Args:
        max_calls: maximum number of calls allowed inside one window.
        period: window length in seconds.

    Returns:
        A decorator producing a throttled version of the target function.
    """
    def decorator(func):
        calls = []  # timestamps (time.time()) of calls inside the current window

        @wraps(func)
        def wrapper(*args, **kwargs):
            nonlocal calls
            now = time.time()
            # Drop timestamps that have aged out of the sliding window.
            calls = [t for t in calls if now - t < period]
            if len(calls) >= max_calls:
                wait_time = period - (now - calls[0])
                print(f"Rate limit exceeded. Waiting for {wait_time:.2f} seconds.")
                time.sleep(wait_time)
                # Re-read the clock and prune again: the sleep advanced time,
                # so recording the pre-sleep `now` would under-count the window
                # and let later calls through too early.
                now = time.time()
                calls = [t for t in calls if now - t < period]
            calls.append(now)
            return func(*args, **kwargs)
        return wrapper
    return decorator
21+
22+
@rate_limiter(max_calls=5, period=10)
def api_call():
    """Simulated API request: allowed at most 5 times per 10 seconds."""
    print("API call made")
25+
26+
if __name__ == "__main__":
    # Fire ten calls one second apart; the limiter throttles after the
    # fifth call inside any 10-second window.
    for _attempt in range(10):
        api_call()
        time.sleep(1)

advanced/async_web_scraper.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# async_web_scraper.py
# Dependencies: aiohttp, beautifulsoup4
# Install with: pip install aiohttp beautifulsoup4
4+
5+
import aiohttp
6+
import asyncio
7+
from bs4 import BeautifulSoup
8+
9+
async def fetch(session, url):
    """Return the body of *url* fetched through *session* as text."""
    async with session.get(url) as resp:
        return await resp.text()
12+
13+
async def scrape_website(url):
    """Fetch *url* and return the text of every <h1> element on the page."""
    async with aiohttp.ClientSession() as session:
        page = await fetch(session, url)
        parsed = BeautifulSoup(page, 'html.parser')
        return [heading.get_text() for heading in parsed.find_all('h1')]
19+
20+
if __name__ == "__main__":
    # Prompt for a target, scrape it, and list the <h1> titles found.
    target = input("Enter the URL of the website to scrape: ")
    found_titles = asyncio.run(scrape_website(target))
    print("Page Titles:")
    for heading in found_titles:
        print(heading)

advanced/chatbot.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
# chatbot.py
2+
# Dependencies: transformers
3+
# Install with: pip install transformers
4+
5+
from transformers import pipeline
6+
7+
def chat():
    """Interactive console chat with the BlenderBot model.

    Type 'exit' or 'quit' (any case) to stop.
    """
    # Local import: the file already depends on transformers; Conversation is
    # needed because the 'conversational' pipeline exchanges Conversation
    # objects, not raw strings — passing a str and indexing the result with
    # ['generated_text'] raises at runtime.
    from transformers import Conversation

    chatbot = pipeline('conversational', model='facebook/blenderbot-400M-distill')
    # Keep one Conversation across turns so the model sees dialogue history.
    conversation = Conversation()
    while True:
        user_input = input("You: ")
        if user_input.lower() in ['exit', 'quit']:
            print("Goodbye!")
            break
        conversation.add_user_input(user_input)
        conversation = chatbot(conversation)
        print(f"Bot: {conversation.generated_responses[-1]}")
16+
17+
if __name__ == "__main__":
    # Entry point: start the interactive chat loop.
    chat()

advanced/data_visualization.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
# data_visualization.py
2+
# Dependencies: matplotlib, pandas
3+
# Install with: pip install matplotlib pandas
4+
5+
import matplotlib.pyplot as plt
6+
import pandas as pd
7+
8+
def visualize_data(csv_file):
    """Plot the 'Date' vs 'Value' columns of *csv_file* as a line chart.

    Saves the chart to visualization.png and then shows it interactively.

    Args:
        csv_file: path to a CSV containing 'Date' and 'Value' columns.
    """
    data = pd.read_csv(csv_file)
    # Pass figsize directly to DataFrame.plot: a preceding plt.figure(...)
    # call is ignored because pandas creates its own figure, which also
    # leaked an empty figure in the original code.
    ax = data.plot(kind='line', x='Date', y='Value',
                   title='Data Visualization', figsize=(10, 6))
    ax.set_xlabel('Date')
    ax.set_ylabel('Value')
    ax.grid(True)
    plt.savefig('visualization.png')
    plt.show()
17+
18+
if __name__ == "__main__":
    # Ask for the data file, then render and save the chart.
    csv_path = input("Enter the path to the CSV file: ")
    visualize_data(csv_path)

advanced/web_scraper_advanced.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
# web_scraper_advanced.py
# Dependencies: requests, beautifulsoup4, pandas
# Install with: pip install requests beautifulsoup4 pandas
4+
5+
import requests
6+
from bs4 import BeautifulSoup
7+
import pandas as pd
8+
9+
def scrape_website(url):
    """Download *url* and return the text of every <h2> headline on it."""
    page = requests.get(url)
    parsed = BeautifulSoup(page.text, 'html.parser')
    return [heading.text for heading in parsed.find_all('h2')]
14+
15+
def save_to_csv(data, filename):
    """Write *data* to *filename* as a one-column CSV headed 'Headlines'."""
    frame = pd.DataFrame(data, columns=['Headlines'])
    frame.to_csv(filename, index=False)
18+
19+
if __name__ == "__main__":
    # Scrape Hacker News front-page headlines and persist them to CSV.
    source = 'https://news.ycombinator.com/'
    found = scrape_website(source)
    save_to_csv(found, 'headlines.csv')
    print("Headlines saved to headlines.csv")

beginner/fibonacci_sequence.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# fibonacci_sequence.py
2+
def fibonacci(n):
    """Return the Fibonacci numbers strictly less than *n*, in order."""
    current, following = 0, 1
    terms = []
    while current < n:
        terms.append(current)
        current, following = following, current + following
    return terms
9+
10+
if __name__ == "__main__":
    # Read an upper bound and print the sequence below it.
    limit = int(input("Generate Fibonacci sequence up to: "))
    print(f"Fibonacci sequence up to {limit}: {fibonacci(limit)}")

beginner/hello_world.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# hello_world.py
# Bind the message to a name, then emit it to stdout.
greeting = "Hello, World!"
print(greeting)

beginner/number_guessing_game.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
# number_guessing_game.py
2+
import random
3+
4+
def guess_number():
    """Interactive game: guess a random number between 1 and 100.

    Prints 'Too low!'/'Too high!' hints and reports the attempt count on
    success. Invalid (non-numeric) input re-prompts instead of crashing.
    """
    number = random.randint(1, 100)
    attempts = 0
    while True:
        try:
            guess = int(input("Guess a number between 1 and 100: "))
        except ValueError:
            # Non-numeric input previously raised and killed the game;
            # re-prompt without counting it as an attempt.
            print("Please enter a whole number.")
            continue
        attempts += 1
        if guess < number:
            print("Too low!")
        elif guess > number:
            print("Too high!")
        else:
            print(f"Congratulations! You've guessed the number {number} in {attempts} attempts.")
            break
17+
18+
if __name__ == "__main__":
    # Entry point: run one round of the guessing game.
    guess_number()

beginner/number_to_words.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
# number_to_words.py
2+
def number_to_words(n):
    """Spell out an integer in English words; valid for 0 through 99.

    Returns 'Number out of range' for anything outside that interval.
    """
    ones = ["Zero", "One", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine"]
    teens = ["Ten", "Eleven", "Twelve", "Thirteen", "Fourteen", "Fifteen", "Sixteen", "Seventeen", "Eighteen", "Nineteen"]
    tens = ["Twenty", "Thirty", "Forty", "Fifty", "Sixty", "Seventy", "Eighty", "Ninety"]

    if not 0 <= n < 100:
        return "Number out of range"
    if n < 10:
        return ones[n]
    if n < 20:
        return teens[n - 10]
    # 20-99: tens word, hyphenated with the ones word unless it is exact.
    tens_word = tens[n // 10 - 2]
    remainder = n % 10
    return tens_word if remainder == 0 else tens_word + '-' + ones[remainder]
15+
16+
if __name__ == "__main__":
    # Prompt for a value and print its English spelling.
    value = int(input("Enter a number between 0 and 99: "))
    print(f"Number in words: {number_to_words(value)}")

beginner/prime_number_checker.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# prime_number_checker.py
2+
def is_prime(n):
    """Return True when *n* is a prime number, False otherwise."""
    if n <= 1:
        return False
    # Trial division: a composite must have a divisor no larger than sqrt(n).
    return all(n % divisor for divisor in range(2, int(n ** 0.5) + 1))
9+
10+
if __name__ == "__main__":
    # Read a number and report whether it is prime.
    candidate = int(input("Enter a number to check if it's prime: "))
    verdict = "is a prime number." if is_prime(candidate) else "is not a prime number."
    print(f"{candidate} {verdict}")

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy