Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added .DS_Store
Binary file not shown.
87 changes: 87 additions & 0 deletions hw-05/task5.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
class Node:
    """Doubly-linked list element (key/value pair) used by List2L."""

    def __init__(self, key=None, value=None):
        """Create an unlinked node holding *key* and *value*."""
        self.key, self.value = key, value
        # Neighbour links stay None until the node is inserted into a list.
        self.prev = self.next = None

    def get_key(self):
        """Return the stored key."""
        return self.key

    def get_value(self):
        """Return the stored value."""
        return self.value

    def set_value(self, val):
        """Overwrite the stored value with *val*."""
        self.value = val


class List2L:
    """Classic doubly-linked list built around two sentinel nodes."""

    def __init__(self):
        """Create an empty list: head and tail sentinels linked together."""
        self.head, self.tail = Node(), Node()
        self.head.next, self.tail.prev = self.tail, self.head

    def add_front(self, node):
        """Insert *node* immediately after the head sentinel."""
        first = self.head.next
        node.prev, node.next = self.head, first
        first.prev = node
        self.head.next = node

    def remove(self, node):
        """Unlink *node* from its current position (any position works)."""
        node.prev.next = node.next
        node.next.prev = node.prev

    def pop_last(self):
        """Detach and return the node just before the tail sentinel."""
        last = self.tail.prev
        self.remove(last)
        return last


class LRUCache:
    """Fixed-capacity key/value cache with least-recently-used eviction."""

    def __init__(self, limit=42):
        """Create an empty cache holding at most *limit* entries."""
        self.limit = limit
        self.list = List2L()   # recency order: front = most recently used
        self.cache = {}        # key -> Node, for O(1) lookup

    def get(self, key):
        """Return the value for *key* (refreshing its recency) or None."""
        node = self.cache.get(key)
        if node is None:
            return None
        # Promote the touched entry to the front of the recency list.
        self.list.remove(node)
        self.list.add_front(node)
        return node.get_value()

    def set(self, key, value):
        """Store *value* under *key*, evicting the LRU entry when full."""
        node = self.cache.get(key)
        if node is not None:
            # Existing key: update in place and mark as most recent.
            node.set_value(value)
            self.list.remove(node)
            self.list.add_front(node)
            return
        if len(self.cache) >= self.limit:
            # At capacity: drop the least recently used entry first.
            evicted = self.list.pop_last()
            self.cache.pop(evicted.get_key())
        fresh = Node(key, value)
        self.cache[key] = fresh
        self.list.add_front(fresh)
18 changes: 18 additions & 0 deletions hw-05/test_task5.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from task5 import LRUCache


def test_predict_general_cases():
    """End-to-end check of LRU ordering and eviction at capacity 2."""
    lru = LRUCache(2)

    lru.set("k1", "val1")
    lru.set("k2", "val2")

    # Miss returns None; hits refresh recency, so k1 ends up most recent.
    assert lru.get("k3") is None
    assert lru.get("k2") == "val2"
    assert lru.get("k1") == "val1"

    # Adding a third key evicts the least recently used entry: k2.
    lru.set("k3", "val3")

    assert lru.get("k3") == "val3"
    assert lru.get("k2") is None
    assert lru.get("k1") == "val1"
81 changes: 81 additions & 0 deletions hw-07/fetcher.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
"""Async URL fetcher: для асинхронной обкачки урлов"""

import argparse
import ssl
import asyncio
import aiohttp


async def fetch_url(url, session, sem):
    """
    Asynchronously download one page and return its body as text.

    The semaphore *sem* caps the number of simultaneous requests.
    Returns None (and prints the error) when the request fails.
    """
    # Certificate verification is disabled so arbitrary homework URLs with
    # bad/missing certs still fetch. Do NOT do this in production code.
    # Built outside the try: these calls do not raise network errors.
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.CERT_NONE

    async with sem:
        try:
            async with session.get(url, ssl=ssl_context) as resp:
                text = await resp.text()
                print(f"Fetched {url}")
                return text
        # aiohttp.ClientError is the base of all client-side failures
        # (DNS, refused connections, disconnects, protocol errors) — the
        # previous ClientConnectorDNSError-only catch let any other
        # failure propagate and kill the whole gather() batch.
        except (asyncio.TimeoutError, aiohttp.ClientError) as e:
            print(f"Error fetching {url}: {e}")
            return None


async def fetch_batch_urls(urls, session, sem):
    """
    Fetch every URL concurrently and print a preview of each result.
    """
    # gather() schedules the coroutines itself; an explicit create_task
    # wrapper is not needed.
    pages = await asyncio.gather(
        *(fetch_url(url, session, sem) for url in urls)
    )
    for i, text in enumerate(pages):
        if text is not None:
            print(f"Result {i}: {text[:100]}")


def read_urls_from_file(filename):
    """
    Return the non-blank lines of *filename*, stripped of whitespace.
    """
    urls = []
    with open(filename, "r", encoding="utf-8") as handle:
        for raw_line in handle:
            cleaned = raw_line.strip()
            if cleaned:
                urls.append(cleaned)
    return urls


def parse_args(argv=None):
    """
    Parse command-line arguments.

    argv: optional list of argument strings. Defaults to None, which
    makes argparse fall back to sys.argv[1:] — existing callers keep
    working, and tests can pass arguments directly without touching
    sys.argv.
    Returns a namespace with `concurrency` (int) and `urlfile` (str).
    """
    parser = argparse.ArgumentParser(description="Async URL fetcher")
    parser.add_argument("concurrency", type=int,
                        help="maximum number of simultaneous requests")
    parser.add_argument("urlfile", type=str,
                        help="path to a file with one URL per line")
    return parser.parse_args(argv)


async def run(urls, concurrency):
    """
    Open a single aiohttp session and fetch all *urls*, allowing at
    most *concurrency* requests in flight at once.
    """
    semaphore = asyncio.Semaphore(concurrency)
    async with aiohttp.ClientSession() as session:
        await fetch_batch_urls(urls, session, semaphore)


def main():
    """
    Entry point: parse CLI arguments, load the URL list from the given
    file, and drive the async fetch run to completion.
    """
    args = parse_args()
    url_list = read_urls_from_file(args.urlfile)
    asyncio.run(run(url_list, args.concurrency))


if __name__ == "__main__":
    main()
63 changes: 63 additions & 0 deletions hw-07/test_task7.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
""" Файл для тестирования """


import asyncio
import sys
import aiohttp
import pytest


from fetcher import read_urls_from_file, parse_args, run
from fetcher import fetch_url


@pytest.mark.asyncio
async def test_fetch_url_success():
    """A reachable URL from urls.txt yields a non-empty page body."""
    sem = asyncio.Semaphore(1)
    with open('urls.txt', encoding='utf-8') as f:
        # Take the last NON-BLANK line: with the previous
        # f.read().split('\n')[-1], a trailing newline in urls.txt made
        # the "last URL" an empty string and broke the test.
        lines = [line.strip() for line in f if line.strip()]
    url = lines[-1]
    async with aiohttp.ClientSession() as session:
        text = await fetch_url(url, session, sem)
        assert text is not None
        assert 'decoreo.ru' in text


@pytest.mark.asyncio
async def test_fetch_url_fail():
    """An unresolvable host makes fetch_url return None instead of raising."""
    semaphore = asyncio.Semaphore(1)
    bad_url = "https://some-weird-link/"
    async with aiohttp.ClientSession() as session:
        result = await fetch_url(bad_url, session, semaphore)
    assert result is None


def test_read_urls_from_file():
    """read_urls_from_file returns the stripped, non-blank lines of a file."""
    with open('urls.txt', encoding='utf-8') as f:
        # Normalize the expected list the same way read_urls_from_file
        # does (strip + drop blanks): the raw split('\n') comparison
        # failed whenever urls.txt had a trailing newline or padded
        # lines, because the function filters those out.
        expected = [line.strip() for line in f if line.strip()]
    with open('test_urls.txt', 'w', encoding='utf-8') as f:
        f.write('\n'.join(expected))
    assert read_urls_from_file('test_urls.txt') == expected


def test_parse_args(monkeypatch):
    """parse_args reads concurrency and urlfile from sys.argv."""
    monkeypatch.setattr(sys, "argv", ["fetcher.py", "5", "urls.txt"])
    parsed = parse_args()
    assert parsed.concurrency == 5
    assert parsed.urlfile == "urls.txt"


@pytest.mark.asyncio
async def test_run(tmp_path):
    """run() completes end to end for a one-URL file."""
    url_file = tmp_path / "urls.txt"
    url_file.write_text("https://geotargetly.com/")
    fetched_urls = read_urls_from_file(str(url_file))
    await run(fetched_urls, 1)
6 changes: 6 additions & 0 deletions hw-07/test_urls.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
https://ru.wikipedia.org/wiki/Заглавная_страница
https://ru.wikipedia.org/wiki/Заглавная_страница
https://ru.wikipedia.org/wiki/Заглавная_страница
https://decoreo.ru/product-category/mebel/shkafy-kupe/?srsltid=AfmBOoo9YSOL-iXEHJx2hjSo3W5KMWCl9L20OBtFUvS3nCDgAe3dBR5B
https://decoreo.ru/product-category/mebel/shkafy-kupe/?srsltid=AfmBOoo9YSOL-iXEHJx2hjSo3W5KMWCl9L20OBtFUvS3nCDgAe3dBR5B
https://decoreo.ru/product-category/mebel/shkafy-kupe/?srsltid=AfmBOoo9YSOL-iXEHJx2hjSo3W5KMWCl9L20OBtFUvS3nCDgAe3dBR5B
6 changes: 6 additions & 0 deletions hw-07/urls.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
https://ru.wikipedia.org/wiki/Заглавная_страница
https://ru.wikipedia.org/wiki/Заглавная_страница
https://ru.wikipedia.org/wiki/Заглавная_страница
https://decoreo.ru/product-category/mebel/shkafy-kupe/?srsltid=AfmBOoo9YSOL-iXEHJx2hjSo3W5KMWCl9L20OBtFUvS3nCDgAe3dBR5B
https://decoreo.ru/product-category/mebel/shkafy-kupe/?srsltid=AfmBOoo9YSOL-iXEHJx2hjSo3W5KMWCl9L20OBtFUvS3nCDgAe3dBR5B
https://decoreo.ru/product-category/mebel/shkafy-kupe/?srsltid=AfmBOoo9YSOL-iXEHJx2hjSo3W5KMWCl9L20OBtFUvS3nCDgAe3dBR5B
55 changes: 0 additions & 55 deletions lesson-01/homework.md

This file was deleted.

Binary file removed lesson-01/lesson-01.pdf
Binary file not shown.
Loading