Первая публикация

This commit is contained in:
2024-12-17 14:40:56 +04:00
commit 722be30443
19 changed files with 2618 additions and 0 deletions
+127
View File
@@ -0,0 +1,127 @@
import logging
import os
import sys
import warnings
from ruamel.yaml import YAML
# Default local directory where synced iCloud content is stored.
DEFAULT_ROOT_DESTINATION = "./icloud"
# Subdirectory (under the root) for raw iCloud Drive downloads.
DEFAULT_DRIVE_DESTINATION = "drive"
# Subdirectory (under the root) for files converted/exported to other formats.
DEFAULT_DRIVE_DESTINATION_EXPORT = "drive_export"
DEFAULT_RETRY_LOGIN_INTERVAL_SEC = 600  # 10 minutes
DEFAULT_SYNC_INTERVAL_SEC = 1800  # 30 minutes
DEFAULT_CONFIG_FILE_NAME = "config.yaml"
# Environment variable names for overriding the password / config file path.
ENV_ICLOUD_PASSWORD_KEY = "ENV_ICLOUD_PASSWORD"
ENV_CONFIG_FILE_PATH_KEY = "ENV_CONFIG_FILE_PATH"
DEFAULT_LOGGER_LEVEL = "info"
DEFAULT_LOG_FILE_NAME = "icloud.log"
# config.yaml is expected one directory above this package.
DEFAULT_CONFIG_FILE_PATH = os.path.join(
    os.path.dirname(os.path.dirname(__file__)), DEFAULT_CONFIG_FILE_NAME
)
DEFAULT_COOKIE_DIRECTORY = "session_data"
# icloudpy dependencies emit DeprecationWarnings that are noise for end users.
warnings.filterwarnings("ignore", category=DeprecationWarning)
def read_config(config_path=DEFAULT_CONFIG_FILE_PATH):
    """Load and lightly normalize the YAML configuration file.

    Args:
        config_path: Path to the YAML config file.

    Returns:
        The parsed config mapping with app > credentials > username stripped
        (missing username becomes ""), or None when the file does not exist.
    """
    if not (config_path and os.path.exists(config_path)):
        print(f"Файл конфигурации не найден по адресу {config_path}.")
        return None
    with open(file=config_path, encoding="utf-8") as config_file:
        config = YAML().load(config_file)
    # Guard against a partially-filled config: the original indexed
    # config["app"]["credentials"]["username"] unconditionally and raised
    # KeyError/TypeError when any level was missing.
    app_section = config.get("app") if config else None
    credentials = app_section.get("credentials") if app_section else None
    if credentials is not None and "username" in credentials:
        username = credentials["username"]
        credentials["username"] = username.strip() if username is not None else ""
    return config
def get_logger_config(config):
    """Extract logger settings from the parsed config.

    Args:
        config: Full config mapping; must contain an "app" section.

    Returns:
        dict with "level" and "filename" keys (defaults applied), or None
        when no app > logger section is configured.
    """
    if "logger" not in config["app"]:
        return None
    config_app_logger = config["app"]["logger"]
    # A key that is present but empty (None in YAML) must fall back to the
    # default instead of crashing on .strip(); the original only checked
    # key presence and raised AttributeError on a None value.
    level = config_app_logger.get("level")
    filename = config_app_logger.get("filename")
    return {
        "level": level.strip().lower() if level is not None else DEFAULT_LOGGER_LEVEL,
        "filename": filename.strip().lower()
        if filename is not None
        else DEFAULT_LOG_FILE_NAME,
    }
def log_handler_exists(logger, handler_type, **kwargs):
    """Return True if *logger* already carries an equivalent handler.

    FileHandler instances match on the log file name suffix
    (kwargs["filename"]); StreamHandler instances match on stream identity
    (kwargs["stream"]). Used to avoid attaching duplicate handlers.
    """
    candidates = (h for h in logger.handlers if isinstance(h, handler_type))
    for existing in candidates:
        if handler_type is logging.FileHandler and existing.baseFilename.endswith(
            kwargs["filename"]
        ):
            return True
        if (
            handler_type is logging.StreamHandler
            and existing.stream is kwargs["stream"]
        ):
            return True
    return False
class ColorfulConsoleFormatter(logging.Formatter):
    """Console formatter that colors each record by severity using ANSI codes."""

    grey = "\x1b[38;21m"
    blue = "\x1b[38;5;39m"
    yellow = "\x1b[38;5;226m"
    red = "\x1b[38;5;196m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"

    def __init__(self, fmt):
        super().__init__()
        self.fmt = fmt
        # Colored format string per level (kept as a public attribute for
        # backward compatibility).
        self.formats = {
            logging.DEBUG: self.grey + self.fmt + self.reset,
            logging.INFO: self.blue + self.fmt + self.reset,
            logging.WARNING: self.yellow + self.fmt + self.reset,
            logging.ERROR: self.red + self.fmt + self.reset,
            logging.CRITICAL: self.bold_red + self.fmt + self.reset,
        }
        # Pre-build one Formatter per level instead of constructing a new
        # Formatter object on every format() call (the original rebuilt it
        # for each record).
        self._formatters = {
            level: logging.Formatter(colored)
            for level, colored in self.formats.items()
        }

    def format(self, record):
        formatter = self._formatters.get(record.levelno)
        if formatter is None:
            # Unknown/custom level: fall back to an on-demand formatter,
            # matching the original behavior (uncolored, default layout).
            formatter = logging.Formatter(self.formats.get(record.levelno))
        return formatter.format(record)
def get_logger():
    """Build (or return) the root logger configured from config.yaml.

    Attaches a file handler and a colored stdout handler at most once;
    repeated calls do not duplicate handlers. When the config file is
    missing, the root logger is returned untouched.
    """
    logger = logging.getLogger()
    config = read_config()
    # BUG FIX: read_config() returns None when the file is missing; the
    # original passed None straight into get_logger_config, which indexed
    # config["app"] and crashed with TypeError.
    logger_config = get_logger_config(config=config) if config else None
    if logger_config:
        # Single source of truth for the layout used by both handlers.
        log_format = (
            "%(asctime)s :: %(levelname)s :: %(name)s :: %(filename)s"
            " :: %(lineno)d :: %(message)s"
        )
        level_name = logging.getLevelName(level=logger_config["level"].upper())
        logger.setLevel(level=level_name)
        if not log_handler_exists(
            logger=logger,
            handler_type=logging.FileHandler,
            filename=logger_config["filename"],
        ):
            file_handler = logging.FileHandler(logger_config["filename"])
            file_handler.setFormatter(logging.Formatter(log_format))
            logger.addHandler(file_handler)
        if not log_handler_exists(
            logger=logger, handler_type=logging.StreamHandler, stream=sys.stdout
        ):
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setFormatter(ColorfulConsoleFormatter(log_format))
            logger.addHandler(console_handler)
    return logger
# Module-level singleton logger, configured from config.yaml at import time.
LOGGER = get_logger()
+159
View File
@@ -0,0 +1,159 @@
import os
from src import (
DEFAULT_DRIVE_DESTINATION,
DEFAULT_DRIVE_DESTINATION_EXPORT,
DEFAULT_RETRY_LOGIN_INTERVAL_SEC,
DEFAULT_ROOT_DESTINATION,
DEFAULT_SYNC_INTERVAL_SEC,
LOGGER,
)
def config_path_to_string(config_path):
    """Render a config key path such as ["app", "root"] as "app > root"."""
    separator = " > "
    return separator.join(config_path)
def traverse_config_path(config, config_path: list[str]) -> bool:
    """Return True when every key along *config_path* exists in *config*.

    An empty path always traverses successfully; a falsy node at any level
    fails the traversal.
    """
    node = config
    for key in config_path:
        if not (node and key in node):
            return False
        node = node[key]
    return True
def get_config_value(config, config_path):
    """Return the value at *config_path*.

    The path must be non-empty and fully present (check with
    traverse_config_path first).
    """
    node = config
    for key in config_path[:-1]:
        node = node[key]
    return node[config_path[-1]]
def get_username(config):
    """Read app > credentials > username from *config*.

    Returns the stripped username, or None when the key is absent or empty
    (an error is logged in both failure cases).
    """
    config_path = ["app", "credentials", "username"]
    if not traverse_config_path(config=config, config_path=config_path):
        LOGGER.error(
            f"username отсутствует в {config_path_to_string(config_path)}. Пожалуйста, установите имя пользователя."
        )
        return None
    username = get_config_value(config=config, config_path=config_path).strip()
    if len(username) == 0:
        LOGGER.error(f"username пустое в {config_path_to_string(config_path)}.")
        return None
    return username
def get_retry_login_interval(config):
    """Return the retry interval (seconds) between failed login attempts.

    Falls back to DEFAULT_RETRY_LOGIN_INTERVAL_SEC when the key is absent.
    """
    config_path = ["app", "credentials", "retry_login_interval"]
    if traverse_config_path(config=config, config_path=config_path):
        retry_login_interval = get_config_value(config=config, config_path=config_path)
        LOGGER.info(f"Повторная попытка входа каждые {retry_login_interval} секунд.")
    else:
        retry_login_interval = DEFAULT_RETRY_LOGIN_INTERVAL_SEC
        LOGGER.warning(
            f"retry_login_interval не найден в {config_path_to_string(config_path=config_path)}."
            + f" Использование по умолчанию {retry_login_interval} секунд ..."
        )
    return retry_login_interval
def get_drive_sync_interval(config):
    """Return drive > sync_interval in seconds, or the module default."""
    config_path = ["drive", "sync_interval"]
    if traverse_config_path(config=config, config_path=config_path):
        sync_interval = get_config_value(config=config, config_path=config_path)
    else:
        sync_interval = DEFAULT_SYNC_INTERVAL_SEC
        LOGGER.warning(
            f"sync_interval не найден в {config_path_to_string(config_path=config_path)}."
            + f" Использование sync_interval по умолчанию: {sync_interval} секунд ..."
        )
    return sync_interval
def prepare_root_destination(config):
    """Resolve the root sync directory from app > root and create it on disk.

    Returns the absolute path of the (now existing) root directory.
    """
    LOGGER.debug("Проверка root ...")
    config_path = ["app", "root"]
    if traverse_config_path(config=config, config_path=config_path):
        root_destination = get_config_value(config=config, config_path=config_path)
    else:
        root_destination = DEFAULT_ROOT_DESTINATION
        LOGGER.warning(
            f"Предупреждение: root отсутствует в {config_path_to_string(config_path)}."
            + f" Использование root по умолчанию: {root_destination}",
        )
    root_destination_path = os.path.abspath(root_destination)
    os.makedirs(root_destination_path, exist_ok=True)
    return root_destination_path
def prepare_drive_destination(config):
    """Resolve drive > destination (under the root) and create it on disk.

    Returns the absolute path of the download directory.
    """
    LOGGER.debug("Проверка пути сохранения фалов...")
    config_path = ["drive", "destination"]
    if traverse_config_path(config=config, config_path=config_path):
        drive_destination = get_config_value(config=config, config_path=config_path)
    else:
        drive_destination = DEFAULT_DRIVE_DESTINATION
        LOGGER.warning(
            f"Внимание: путь сохранения фалов отсутствует в {config_path_to_string(config_path)}."
            + f" Использование путь сохранения фалов по умолчанию: {drive_destination}."
        )
    drive_destination_path = os.path.abspath(
        os.path.join(prepare_root_destination(config=config), drive_destination)
    )
    os.makedirs(drive_destination_path, exist_ok=True)
    return drive_destination_path
def prepare_drive_destination_export(config):
    """Resolve drive > destination_export (under the root) and create it on disk.

    Returns the absolute path of the directory receiving converted files.
    """
    LOGGER.debug("Проверка пути конвертации фалов ...")
    config_path = ["drive", "destination_export"]
    if traverse_config_path(config=config, config_path=config_path):
        drive_destination_export = get_config_value(
            config=config, config_path=config_path
        )
    else:
        drive_destination_export = DEFAULT_DRIVE_DESTINATION_EXPORT
        LOGGER.warning(
            f"Внимание: путь конвертации фалов отсутствует в {config_path_to_string(config_path)}."
            + f" Использование путь конвертации фалов по умолчанию: {drive_destination_export}."
        )
    drive_destination_export_path = os.path.abspath(
        os.path.join(prepare_root_destination(config=config), drive_destination_export)
    )
    os.makedirs(drive_destination_export_path, exist_ok=True)
    return drive_destination_export_path
def get_drive_remove_obsolete(config):
    """Return drive > remove_obsolete (whether to delete stale local files).

    Defaults to False (keep everything) when the key is absent.
    """
    config_path = ["drive", "remove_obsolete"]
    if traverse_config_path(config=config, config_path=config_path):
        return get_config_value(config=config, config_path=config_path)
    LOGGER.warning(
        f"Предупреждение: remove_obsolete не найден в {config_path_to_string(config_path)}."
        + " Используется параметр не удалять устаревшие файлы и папки."
    )
    return False
def get_region(config):
    """Return the iCloud server region from app > region.

    Valid values are "global" and "china"; anything else (or a missing key)
    falls back to "global".
    """
    region = "global"
    config_path = ["app", "region"]
    if not traverse_config_path(config=config, config_path=config_path):
        LOGGER.warning(
            f"{config_path_to_string(config_path=config_path)} не найдено. Использует значение по умолчанию - global ..."
        )
    else:
        region = get_config_value(config=config, config_path=config_path)
        # Only two server regions exist; reject any other value.
        if region not in ["global", "china"]:
            LOGGER.error(
                f"{config_path_to_string(config_path=config_path)} недействительно. \
                Допустимые значения - global или china. Использование значения по умолчанию - global ..."
            )
            region = "global"
    return region
+126
View File
@@ -0,0 +1,126 @@
import datetime
import os
from time import sleep
from icloudpy import ICloudPyService, exceptions, utils
from src import (
DEFAULT_CONFIG_FILE_PATH,
DEFAULT_COOKIE_DIRECTORY,
ENV_CONFIG_FILE_PATH_KEY,
ENV_ICLOUD_PASSWORD_KEY,
LOGGER,
config_parser,
read_config,
sync_drive
)
def get_api_instance(
    username,
    password,
    cookie_directory=DEFAULT_COOKIE_DIRECTORY,
    server_region="global",
):
    """Create an ICloudPyService session.

    When *server_region* is "china" the .com.cn endpoints are used;
    otherwise the default worldwide endpoints apply.
    """
    if server_region == "china":
        return ICloudPyService(
            apple_id=username,
            password=password,
            cookie_directory=cookie_directory,
            home_endpoint="https://www.icloud.com.cn",
            setup_endpoint="https://setup.icloud.com.cn/setup/ws/1",
        )
    return ICloudPyService(
        apple_id=username,
        password=password,
        cookie_directory=cookie_directory,
    )
def sync():
    """Main loop: log in to iCloud, sync Drive, sleep, repeat.

    Exits only when the configured drive sync interval (or, after a 2FA
    failure, the retry login interval) is negative.
    """
    enable_sync_drive = True
    drive_sync_interval = 0
    sleep_for = 10
    while True:
        config = read_config(
            config_path=os.environ.get(
                ENV_CONFIG_FILE_PATH_KEY, DEFAULT_CONFIG_FILE_PATH
            )
        )
        username = config_parser.get_username(config=config)
        if username:
            try:
                if ENV_ICLOUD_PASSWORD_KEY in os.environ:
                    # Password supplied via environment: persist it to the
                    # keyring for subsequent runs.
                    password = os.environ.get(ENV_ICLOUD_PASSWORD_KEY)
                    utils.store_password_in_keyring(
                        username=username, password=password
                    )
                else:
                    password = utils.get_password_from_keyring(username=username)
                server_region = config_parser.get_region(config=config)
                api = get_api_instance(
                    username=username, password=password, server_region=server_region
                )
                if not api.requires_2sa:
                    if "drive" in config and enable_sync_drive:
                        LOGGER.info("Синхронизация drive...")
                        # BUG FIX: result was bound to an unused local
                        # ("vvvv") in the original; the call is kept for its
                        # side effects.
                        sync_drive.sync_drive(
                            config=config, drive=api.drive, work=api.work
                        )
                        LOGGER.info("Drive синхронизирован")
                        # Record completion time so external tools can
                        # monitor the last successful sync. Context manager
                        # guarantees the handle is closed.
                        with open("last_update.txt", "w") as last_update_file:
                            last_update_file.write(
                                datetime.datetime.now().isoformat()
                            )
                        drive_sync_interval = config_parser.get_drive_sync_interval(
                            config=config
                        )
                    if "drive" not in config:
                        LOGGER.warning(
                            "Нечего синхронизировать. Добавьте раздел drive в файл config.yaml."
                        )
                else:
                    LOGGER.error("Ошибка: требуется 2FA. Пожалуйста, войдите в систему.")
                    sleep_for = config_parser.get_retry_login_interval(config=config)
                    next_sync = (
                        datetime.datetime.now() + datetime.timedelta(seconds=sleep_for)
                    ).strftime("%c")
                    if sleep_for < 0:
                        LOGGER.info("retry_login_interval is < 0, exiting ...")
                        break
                    LOGGER.info(f"Повторная попытка входа в {next_sync} ...")
                    sleep(sleep_for)
                    continue
            except exceptions.ICloudPyNoStoredPasswordAvailableException:
                LOGGER.error(
                    "Пароль не сохранен в связке ключей. Пожалуйста, сохраните пароль в связке ключей."
                )
                sleep_for = config_parser.get_retry_login_interval(config=config)
                next_sync = (
                    datetime.datetime.now() + datetime.timedelta(seconds=sleep_for)
                ).strftime("%c")
                LOGGER.info(f"Повторная попытка входа в систему {next_sync} ...")
                sleep(sleep_for)
                continue
        # BUG FIX: guard config against None (missing config file); the
        # original evaluated `"drive" in config` and crashed with TypeError.
        if config and "drive" in config:
            sleep_for = drive_sync_interval
            enable_sync_drive = True
        next_sync = (
            datetime.datetime.now() + datetime.timedelta(seconds=sleep_for)
        ).strftime("%c")
        LOGGER.info(f"Повторная синхронизация в {next_sync} ...")
        # BUG FIX: the original exit expression consulted a nonexistent
        # config_parser.get_photos_sync_interval() when "drive" was absent,
        # raising AttributeError; without a drive section we simply keep
        # waiting for one to appear.
        if (
            config
            and "drive" in config
            and config_parser.get_drive_sync_interval(config=config) < 0
        ):
            break
        sleep(sleep_for)
+353
View File
@@ -0,0 +1,353 @@
import gzip
import os
import re
import time
import unicodedata
import zipfile
from pathlib import Path, PurePath
from shutil import copyfileobj, rmtree
from time import sleep
import json
from icloudpy import exceptions
from src import LOGGER, config_parser
def wanted_file(filters, ignore, file_path):
    """Return True if *file_path* passes the ignore list and extension filters.

    An empty filter list accepts every file; the ignore list always wins.
    """
    if not file_path:
        return False
    if ignore and ignored_path(ignore, file_path):
        LOGGER.debug(f"Пропуск ненужного файла {file_path}")
        return False
    if not filters or len(filters) == 0:
        return True
    matches = any(
        re.search(f"{file_extension}$", file_path, re.IGNORECASE)
        for file_extension in filters
    )
    if matches:
        return True
    LOGGER.debug(f"Пропуск ненужного файла {file_path}")
    return False
def wanted_file_name(filters, item):
    """Return True if the item's name is in the file-name filter list.

    An empty/absent filter list accepts every file.
    """
    if not filters or len(filters) == 0:
        return True
    for file_name in filters:
        if item.data['name'] == file_name:
            return True
    # BUG FIX: the original logged the undefined name `file_path` here,
    # raising NameError for every rejected file.
    LOGGER.debug(f"Пропуск ненужного файла {item.data['name']}")
    return False
def wanted_folder(filters, ignore, root, folder_path):
    """Return True if *folder_path* is relevant to the folder filters.

    A folder is wanted when it equals, contains, or is contained in one of
    the filtered folders (ancestors must be kept so descent can continue).
    The ignore list takes precedence; with no filters everything is wanted.
    """
    if ignore and ignored_path(ignore, folder_path):
        return False
    if not filters or not folder_path or not root or len(filters) == 0:
        return True
    target = Path(folder_path)
    root_abs = os.path.abspath(root)
    for folder in filters:
        filtered = Path(
            os.path.join(root_abs, str(folder).removeprefix("/").removesuffix("/"))
        )
        if (
            target == filtered
            or target in filtered.parents
            or filtered in target.parents
        ):
            return True
    return False
def ignored_path(ignore_list, path):
    """Return True if *path* matches any glob pattern in *ignore_list*.

    Patterns ending with "/" are treated as directory prefixes (the whole
    subtree is ignored).
    """
    return any(
        PurePath(path).match(pattern + "*" if pattern.endswith("/") else pattern)
        for pattern in ignore_list
    )
def wanted_parent_folder(filters, ignore, root, folder_path):
    """Return True if *folder_path* is one of the filtered folders or inside one.

    Unlike wanted_folder(), ancestors of a filtered folder do NOT match.
    NOTE(review): the *ignore* parameter is accepted but never consulted
    here — confirm whether that is intentional.
    """
    if not filters or not folder_path or not root or len(filters) == 0:
        return True
    target = Path(folder_path)
    root_abs = os.path.abspath(root)
    for folder in filters:
        filtered = Path(
            os.path.join(root_abs, folder.removeprefix("/").removesuffix("/"))
        )
        if filtered in target.parents or target == filtered:
            return True
    return False
def process_folder(item, destination_path, filters, ignore, root):
    """Create the local directory for a remote folder item.

    Returns the created directory path, or None when an argument is missing
    or the folder is filtered out.
    """
    if not (item and destination_path and root):
        return None
    new_directory = os.path.join(destination_path, item.name)
    # Normalize to NFC so the on-disk name is stable across platforms.
    new_directory_norm = unicodedata.normalize("NFC", new_directory)
    if not wanted_folder(
        filters=filters, ignore=ignore, folder_path=new_directory_norm, root=root
    ):
        LOGGER.debug(f"Пропуск ненужной папки {new_directory} ...")
        return None
    os.makedirs(new_directory_norm, exist_ok=True)
    # BUG FIX: return the same (normalized) path that was actually created;
    # the original returned the un-normalized path, so children could be
    # written under a directory that does not exist on byte-sensitive
    # filesystems.
    return new_directory_norm
def package_exists(item, local_package_path):
    """Return True if the local copy of a package matches the remote item.

    A package matches when both its mtime and its total on-disk size equal
    the remote values; a stale local copy is deleted so it can be
    re-downloaded from scratch.
    """
    if not (item and local_package_path and os.path.isdir(local_package_path)):
        LOGGER.debug(f"Пакет {local_package_path} локально не существует.")
        return False
    local_package_modified_time = int(os.path.getmtime(local_package_path))
    remote_package_modified_time = int(item.date_modified.timestamp())
    local_package_size = sum(
        entry.stat().st_size
        for entry in Path(local_package_path).glob("**/*")
        if entry.is_file()
    )
    remote_package_size = item.size
    unchanged = (
        local_package_modified_time == remote_package_modified_time
        and local_package_size == remote_package_size
    )
    if unchanged:
        LOGGER.debug(
            f"Изменений не обнаружено. Пропуск пакета {local_package_path} ..."
        )
        return True
    LOGGER.info(
        f"Обнаружены изменения: local_modified_time равно {local_package_modified_time}, "
        + f"remote_modified_time равно {remote_package_modified_time}, "
        + f"local_package_size равен {local_package_size} и remote_package_size равен {remote_package_size}."
    )
    rmtree(local_package_path)
    return False
def file_exists(item, local_file):
    """Return True if the local file's mtime matches the remote item's.

    Sizes are collected for the diagnostic log only; the decision is based
    purely on modification time.
    """
    if not (item and local_file and os.path.isfile(local_file)):
        LOGGER.debug(f"Файл {local_file} локально не существует.")
        return False
    local_file_modified_time = int(os.path.getmtime(local_file))
    remote_file_modified_time = int(item.date_modified.timestamp())
    local_file_size = os.path.getsize(local_file)
    remote_file_size = item.size
    if local_file_modified_time == remote_file_modified_time:
        LOGGER.debug(f"Изменений не обнаружено. Файл {local_file} пропущен...")
        return True
    LOGGER.debug(
        f"Обнаружены изменения: local_modified_time равно {local_file_modified_time}, "
        + f"remote_modified_time равно {remote_file_modified_time}, "
        + f"local_file_size равен {local_file_size} и remote_file_size равен {remote_file_size}."
    )
    return False
def process_package(local_file):
    # Placeholder hook for post-processing downloaded packages (e.g.
    # unzipping); currently returns the path unchanged.
    return local_file
def is_package(item):
    """Return a truthy value when the item is served via the package-download
    endpoint (i.e. it is a bundle/directory rather than a plain file)."""
    with item.open(stream=True) as response:
        result = response.url and "/packageDownload?" in response.url
    return result
def download_file(item, local_file):
    """Download a remote drive item to *local_file*.

    Streams the response in 4 MiB chunks and mirrors the remote modification
    time onto the local file so change detection works on the next run.

    Returns:
        The local file path on success, False on failure.
    """
    if not (item and local_file):
        return False
    LOGGER.info(f"Загрузка {local_file} ...")
    try:
        with item.open(stream=True) as response, open(local_file, "wb") as file_out:
            for chunk in response.iter_content(4 * 1024 * 1024):
                file_out.write(chunk)
            if response.url and "/packageDownload?" in response.url:
                local_file = process_package(local_file=local_file)
        item_modified_time = time.mktime(item.date_modified.timetuple())
        os.utime(local_file, (item_modified_time, item_modified_time))
    # The original caught (ICloudPyAPIResponseException, FileNotFoundError,
    # Exception) — the first two entries are dead, Exception already covers
    # them. Kept broad deliberately: a single failed download must not abort
    # the whole sync; the error is logged and False returned.
    except Exception as e:
        LOGGER.error(f"Ошибка скачивания {local_file}: {str(e)}")
        return False
    return local_file
def process_file(item, destination_path, destination_path_export, filters, filters_name, ignore, files, work, convert):
    """Download one remote file (or package) and optionally convert it.

    Every path kept locally is added to *files* (consumed later by
    remove_obsolete). Returns True when a download happened, False when the
    item was skipped or the download failed.
    """
    if not (item and destination_path and files is not None):
        return False
    local_file = os.path.join(destination_path, item.name)
    local_file = unicodedata.normalize("NFC", local_file)
    if not wanted_file(filters=filters, ignore=ignore, file_path=local_file):
        return False
    if not wanted_file_name(filters=filters_name, item=item):
        return False
    files.add(local_file)
    item_is_package = is_package(item=item)
    # BUG FIX: the original tested `if not item_is_package` here, so plain
    # files were checked with package_exists() (always False for a file) and
    # packages with file_exists() — every item was re-downloaded on each run.
    if item_is_package:
        if package_exists(item=item, local_package_path=local_file):
            for f in Path(local_file).glob("**/*"):
                files.add(str(f))
            return False
    elif file_exists(item=item, local_file=local_file):
        return False
    local_file = download_file(item=item, local_file=local_file)
    # BUG FIX: download_file returns False on failure; the original fed that
    # False into Path()/os.path below.
    if not local_file:
        return False
    if item_is_package:
        # Packages unpack to a directory tree; record contents under their
        # NFD-normalized names after renaming on disk.
        for f in Path(local_file).glob("**/*"):
            f = str(f)
            f_normalized = unicodedata.normalize("NFD", f)
            if os.path.exists(f):
                os.rename(f, f_normalized)
            files.add(f_normalized)
    # BUG FIX: *convert* may be None when no convert section is configured;
    # the original crashed with TypeError on iteration.
    for convert_file in convert or []:
        if item.data['name'] == convert_file['name']:
            if item.data['extension'] == "numbers":
                LOGGER.info(f"Конвертация в xlsx {local_file} ...")
                secret = json.dumps({"Type": "wp", "Data": convert_file['secret']})
                job_id = work.export_response(
                    item.data['docwsid'], secret, item.data['zone']
                )
                try:
                    # Poll until the server-side export job completes.
                    while not work.check_job(job_id):
                        sleep(5)
                    work.download_file(
                        job_id, destination_path_export, item.data['name']
                    )
                    local_export_filename = os.path.join(
                        destination_path_export, item.data['name'] + ".xlsx"
                    )
                    LOGGER.info(
                        f"Сконвертированый файл успешно загружен {local_export_filename} ..."
                    )
                except Exception as e:
                    LOGGER.error(f"Ошибка конвертации файла {local_file}: {str(e)}")
    return True
def remove_obsolete(destination_path, files):
    """Delete every path under *destination_path* that is not in *files*.

    Returns the set of removed absolute paths.
    """
    removed_paths = set()
    if not (destination_path and files is not None):
        return removed_paths
    for path in Path(destination_path).rglob("*"):
        local_file = str(path.absolute())
        if local_file in files:
            continue
        LOGGER.info(f"Удаление {local_file} ...")
        if path.is_file():
            path.unlink(missing_ok=True)
            removed_paths.add(local_file)
        elif path.is_dir():
            rmtree(local_file)
            removed_paths.add(local_file)
    return removed_paths
def sync_directory(
    drive,
    work,
    destination_path,
    destination_path_export,
    items,
    root,
    top=True,
    filters=None,
    convert=None,
    ignore=None,
    remove=False,
):
    """Recursively mirror a remote drive directory into *destination_path*.

    Args:
        drive: Remote node whose children are named in *items*.
        work: Service handle used for document export/conversion.
        destination_path: Local directory receiving downloads.
        destination_path_export: Local directory receiving converted files.
        items: Child names of *drive* to process.
        root: Sync root, used to resolve folder filters.
        top: True only for the outermost call; gates obsolete-file removal.
        filters: Optional dict with "folders", "file_extensions", "file_name".
        convert: Optional list of conversion descriptors.
        ignore: Optional list of glob patterns to skip.
        remove: With top=True, delete local files absent remotely.

    Returns:
        Set of local paths that correspond to remote content.
    """
    files = set()
    if not (drive and destination_path and items and root):
        return files
    # Hoisted: the folder-filter lookup was repeated three times per item.
    folder_filters = filters["folders"] if filters and "folders" in filters else None
    for child_name in items:
        item = drive[child_name]
        if item.type in ("folder", "app_library"):
            new_folder = process_folder(
                item=item,
                destination_path=destination_path,
                filters=folder_filters,
                ignore=ignore,
                root=root,
            )
            new_folder_export = process_folder(
                item=item,
                destination_path=destination_path_export,
                filters=folder_filters,
                ignore=ignore,
                root=root,
            )
            if not new_folder:
                continue
            try:
                files.add(unicodedata.normalize("NFC", new_folder))
                files.update(
                    sync_directory(
                        drive=item,
                        work=work,
                        destination_path=new_folder,
                        destination_path_export=new_folder_export,
                        items=item.dir(),
                        root=root,
                        top=False,
                        filters=filters,
                        convert=convert,
                        ignore=ignore,
                    )
                )
            except Exception as e:
                # Best-effort: one broken subtree must not abort the whole
                # sync, but (unlike the original `pass`) the failure is
                # logged so it can be diagnosed.
                LOGGER.error(f"Ошибка синхронизации папки {new_folder}: {str(e)}")
        elif item.type == "file":
            if wanted_parent_folder(
                filters=folder_filters,
                ignore=ignore,
                root=root,
                folder_path=destination_path,
            ):
                try:
                    process_file(
                        item=item,
                        destination_path=destination_path,
                        destination_path_export=destination_path_export,
                        filters=filters["file_extensions"]
                        if filters and "file_extensions" in filters
                        else None,
                        filters_name=filters["file_name"]
                        if filters and "file_name" in filters
                        else None,
                        ignore=ignore,
                        files=files,
                        work=work,
                        convert=convert,
                    )
                except Exception as e:
                    # Same best-effort policy as above, with logging.
                    LOGGER.error(f"Ошибка синхронизации файла {item.name}: {str(e)}")
    if top and remove:
        remove_obsolete(destination_path=destination_path, files=files)
    return files
def sync_drive(config, drive, work):
    """Entry point: sync the whole iCloud Drive tree according to *config*.

    Resolves destination directories, then delegates to sync_directory with
    the optional filter/convert/ignore sub-sections of the drive config.
    """
    destination_path = config_parser.prepare_drive_destination(config=config)
    destination_path_export = config_parser.prepare_drive_destination_export(
        config=config
    )
    drive_config = config["drive"] if "drive" in config else {}

    def _section(key):
        # Optional sub-section of the drive config; None when absent.
        return drive_config[key] if key in drive_config else None

    return sync_directory(
        drive=drive,
        work=work,
        destination_path=destination_path,
        destination_path_export=destination_path_export,
        root=destination_path,
        items=drive.dir(),
        top=True,
        filters=_section("filters"),
        convert=_section("convert"),
        ignore=_section("ignore"),
        remove=config_parser.get_drive_remove_obsolete(config=config),
    )