Delete user_tools directory

Author: David Rotermund, 2024-07-15 16:29:55 +02:00 (committed by GitHub)
Parent: 514797c901
Commit: 892248d808
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
12 changed files with 0 additions and 382 deletions

@@ -1 +0,0 @@

@@ -1,5 +0,0 @@
{
    "allowed_domains": [
        "uni-bremen.de"
    ]
}

@@ -1,5 +0,0 @@
{
    "blocked_users": [
        ""
    ]
}

@@ -1,63 +0,0 @@
import imaplib
import json


def check_mails(
    config_file: str = "config.json", delete_mails: bool = False
) -> list[dict]:
    result: list[dict] = []

    with open(config_file, "r") as file:
        setting: dict = json.load(file)

    with imaplib.IMAP4_SSL(
        host=setting["host"], port=setting["port"]
    ) as imap_connection:
        imap_connection.login(user=setting["user"], password=setting["password"])

        # open inbox
        response_open = imap_connection.select(mailbox="INBOX", readonly=False)
        assert response_open[0] == "OK"
        assert response_open[1] is not None
        assert response_open[1][0] is not None
        number_of_emails: int = int(response_open[1][0])

        if number_of_emails > 0:
            # We want to find all mails in the INBOX
            inbox_typ, inbox_data = imap_connection.search(None, "ALL")
            assert inbox_typ == "OK"

            # Browse through all emails
            for mail_id in inbox_data[0].split(b" "):
                assert mail_id is not None

                # Get the next email for processing
                email_typ, email_data = imap_connection.fetch(mail_id, "(RFC822)")
                assert email_typ == "OK"
                assert email_data is not None
                assert email_data[0] is not None
                assert email_data[0][1] is not None

                field_from: str | None = None
                field_to: str | None = None
                field_subject: str | None = None
                for segments in email_data[0][1].split(b"\r\n"):  # type: ignore
                    if segments.startswith(b"From:"):
                        field_from = segments.decode("utf-8")[6:]
                    if segments.startswith(b"To:"):
                        field_to = segments.decode("utf-8")[4:]
                    if segments.startswith(b"Subject:"):
                        field_subject = segments.decode("utf-8")[9:]

                item = {"from": field_from, "to": field_to, "subject": field_subject}
                result.append(item)

                if delete_mails:
                    imap_connection.store(mail_id, "+FLAGS", "\\Deleted")

        # The trash is emptied
        imap_connection.expunge()
        # close inbox
        imap_connection.close()

    return result
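
For context, a minimal sketch of how this helper might have been called; the call below is not part of the deleted file, and the printed fields are simply the keys the function fills in. The config file name matches the function's own default.

# Hypothetical usage sketch (assumed, not taken from this commit):
# read the INBOX headers without deleting anything, then print them.
if __name__ == "__main__":
    mails = check_mails(config_file="config.json", delete_mails=False)
    for mail in mails:
        print(mail["from"], mail["to"], mail["subject"])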

@@ -1,7 +0,0 @@
{
    "overleafdomain": "https://overleaf.neuro.uni-bremen.de",
    "user": "XXXXXX",
    "password": "XXXXXX",
    "host": "XXXXX",
    "port": 993
}

@@ -1,13 +0,0 @@
import pymongo


def does_user_exists(email_to_find: str) -> bool:
    client = pymongo.MongoClient("overleafmongo", 27017)
    db = client.sharelatex
    users = db.users

    search_result = users.find_one({"email": email_to_find})

    if search_result is None:
        return False
    else:
        return True
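
A short usage sketch, assuming the MongoDB service "overleafmongo" is reachable; the example address is made up and the call is not part of the deleted file.

# Hypothetical usage sketch (assumed, not taken from this commit)
if does_user_exists("jane.doe@uni-bremen.de"):  # example address is made up
    print("User already has an Overleaf account.")
else:
    print("No account found for this address.")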

@@ -1,26 +0,0 @@
import docker


def identify_container(client, label: str = "overleaf") -> str | None:
    containers = client.containers.list(all=True)
    for container in containers:
        if str(container.name) == label:
            return container.id
    return None


def docker_exec(exec_command: str = "ls") -> tuple[bool, str]:
    client = docker.from_env()

    docker_id = identify_container(client)
    assert docker_id is not None
    container = client.containers.get(docker_id)

    command = f'/bin/bash -c "{exec_command}"'
    try:
        result = container.exec_run(command, stream=False)
        # exec_run returns the output as bytes; decode it before returning
        result_string: str = result.output.decode("utf-8")
        return True, result_string
    except docker.errors.APIError:
        return False, ""
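
A brief usage sketch, assuming a container named "overleaf" is running; the command and path passed in are illustrative assumptions, not taken from this commit.

# Hypothetical usage sketch (assumed, not taken from this commit)
ok, output = docker_exec("ls /var/www")  # the command is an assumption
if ok:
    print(output)
else:
    print("exec failed")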

@@ -1,87 +0,0 @@
import pymongo
import datetime
import os
import pickle
import json


def get_activation_requests(remove_old_entries: bool = False) -> list[dict]:
    results: list[dict] = []
    filename = "last_run_activations.pkl"

    with open("config_mail.json", "r") as file:
        config = json.load(file)

    now = datetime.datetime.now()

    client = pymongo.MongoClient("overleafmongo", 27017)
    db = client.sharelatex
    tokens = db.tokens

    continue_at_time = None
    if remove_old_entries:
        if os.path.exists(filename):
            with open(filename, "rb") as file:
                continue_at_time = pickle.load(file)

    query = {"expiresAt": {"$gt": now}}
    if continue_at_time is not None:
        query["createdAt"] = {"$gt": continue_at_time}

    newest = None
    for token in tokens.find(query):
        if newest is None:
            newest = token["createdAt"]
        elif token["createdAt"] > newest:
            newest = token["createdAt"]

    # Freeze time. We don't want to mishandle items that are newer than the last check
    if newest is not None:
        if "createdAt" in query:
            query["createdAt"] = {"$gt": continue_at_time, "$lte": newest}
        else:
            query["createdAt"] = {"$lte": newest}

    # Find unique user ids
    user_id_set = set()
    for token in tokens.find(query):
        user_id_set.add(token["data"]["user_id"])
    user_ids = list(user_id_set)

    # Store the time stamp for newest
    with open(filename, "wb") as file:
        pickle.dump(newest, file)

    for user_id in user_ids:
        new_query = query.copy()
        new_query["data.user_id"] = user_id

        newest_entry = None
        object_id = None
        for token in tokens.find(new_query):
            if newest_entry is None:
                newest_entry = token["createdAt"]
                object_id = token["_id"]
            elif token["createdAt"] > newest_entry:
                newest_entry = token["createdAt"]
                object_id = token["_id"]

        dataset_found = None
        profile = dict()
        if object_id is not None:
            dataset_found = tokens.find_one({"_id": object_id})
            extracted_user_id = dataset_found["data"]["user_id"]
            profile["email"] = dataset_found["data"]["email"]
            extracted_token = dataset_found["token"]
            profile["url_string"] = (
                f"{config['overleafdomain']}/user/activate?token={extracted_token}&user_id={extracted_user_id}"
            )
            results.append(profile)

    return results
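
A brief usage sketch, assuming the MongoDB service and the config_mail.json shown above are available; the loop is not part of the deleted file, and the keys printed are the ones the function itself fills in.

# Hypothetical usage sketch (assumed, not taken from this commit):
# print one activation URL per pending signup request.
for request in get_activation_requests(remove_old_entries=False):
    print(request["email"], request["url_string"])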

@@ -1,5 +0,0 @@
apk add mc
apk add bash
pip install docker
pip install pymongo
pip install email_validator

@@ -1,68 +0,0 @@
# pip install email_validator
import email.utils
from email_validator import validate_email  # type: ignore
import email_validator
import json


def process_emails(
    mail_to_process: list[dict],
    config_file: str = "allowed_domains.json",
    blocked_user_file: str = "blocked_users.json",
) -> list[dict]:
    result: list[dict] = []

    with open(config_file, "r") as file:
        allowed_domains: dict = json.load(file)

    with open(blocked_user_file, "r") as file:
        blocked_users: dict = json.load(file)

    for mail in mail_to_process:
        temp = email.utils.parseaddr(mail["from"])[1]
        if (temp != "") and (temp is not None):
            email_status: bool = False
            try:
                emailinfo = validate_email(temp, check_deliverability=False)
                email_status = True
                temp = emailinfo.normalized
            except email_validator.exceptions_types.EmailSyntaxError:
                email_status = False
            except email_validator.exceptions_types.EmailNotValidError:
                email_status = False

            domain_found = False
            if email_status:
                for domain in allowed_domains["allowed_domains"]:
                    if temp.endswith(domain):
                        domain_found = True

            if domain_found:
                for blocked_user in blocked_users["blocked_users"]:
                    if temp == blocked_user:
                        domain_found = False

            if domain_found:
                from_validated_ab = email.utils.parseaddr(mail["from"])
                try:
                    from_validated = validate_email(
                        from_validated_ab[1], check_deliverability=False
                    )
                    result.append(
                        {
                            "from_a": from_validated_ab[0],
                            "from_b": from_validated.normalized,
                            "to": mail["to"],
                            "subject": mail["subject"],
                        }
                    )
                except email_validator.exceptions_types.EmailSyntaxError:
                    pass
                except email_validator.exceptions_types.EmailNotValidError:
                    pass

    return result
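
For context, a sketch of how this filter might have been chained with check_mails from another file deleted in this same commit; combining the two like this is an assumption about how they were used together, not something stated in the diff.

# Hypothetical usage sketch (assumed, not taken from this commit):
# fetch the INBOX headers and keep only allowed, non-blocked senders.
mails = check_mails(config_file="config.json", delete_mails=False)
valid_senders = process_emails(mails)
for entry in valid_senders:
    print(entry["from_b"], entry["subject"])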

@@ -1,81 +0,0 @@
import pymongo
import datetime
import os
import pickle
import email.utils
from email_validator import validate_email  # type: ignore
import email_validator


def process_invitations(remove_old_entries: bool = False):
    results: list[dict] = []
    filename = "last_run_invitations.pkl"

    now = datetime.datetime.now()

    client = pymongo.MongoClient("overleafmongo", 27017)
    db = client.sharelatex
    project_invites = db.projectInvites

    continue_at_time = None
    if remove_old_entries:
        if os.path.exists(filename):
            with open(filename, "rb") as file:
                continue_at_time = pickle.load(file)

    query = {"expires": {"$gt": now}}
    if continue_at_time is not None:
        query["createdAt"] = {"$gt": continue_at_time}

    newest = None
    for project_invite in project_invites.find(query):
        if newest is None:
            newest = project_invite["createdAt"]
        elif project_invite["createdAt"] > newest:
            newest = project_invite["createdAt"]

    # Freeze time. We don't want to mishandle items that are newer than the last check
    if newest is not None:
        if "createdAt" in query:
            query["createdAt"] = {"$gt": continue_at_time, "$lte": newest}
        else:
            query["createdAt"] = {"$lte": newest}

    # Find unique user ids
    user_id_set = set()
    for project_invite in project_invites.find(query):
        user_id_set.add(project_invite["email"])
    user_ids = list(user_id_set)

    # Store the time stamp for newest
    with open(filename, "wb") as file:
        pickle.dump(newest, file)

    for uid in user_ids:
        from_validated_ab = email.utils.parseaddr(uid)
        try:
            from_validated = validate_email(
                from_validated_ab[1], check_deliverability=False
            )
            results.append(
                {
                    "from_a": None,
                    "from_b": from_validated.normalized,
                    "to": None,
                    "subject": None,
                }
            )
        except email_validator.exceptions_types.EmailSyntaxError:
            pass
        except email_validator.exceptions_types.EmailNotValidError:
            pass

    return results


print(process_invitations())

@@ -1,21 +0,0 @@
import smtplib
from email.message import EmailMessage


def send_mail(
    email_body: str,
    email_subject: str,
    email_from: str,
    email_to: str,
    smtpd_host: str = "overleafsmtpd",
):
    msg = EmailMessage()
    # email_body is annotated as str, so it can be used directly (no decode needed)
    msg.set_content(email_body)
    msg["Subject"] = email_subject
    msg["From"] = email_from
    msg["To"] = email_to

    s = smtplib.SMTP(smtpd_host)
    s.send_message(msg)
    s.quit()
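
A minimal usage sketch, assuming the "overleafsmtpd" host from the function's default is reachable; all field values below are made up for illustration.

# Hypothetical usage sketch (assumed, not taken from this commit)
send_mail(
    email_body="Your Overleaf account is ready.",
    email_subject="Overleaf activation",
    email_from="noreply@uni-bremen.de",
    email_to="jane.doe@uni-bremen.de",
    smtpd_host="overleafsmtpd",
)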