Add files via upload
This commit is contained in:
parent
4f0bc24b73
commit
4c21d2fee6
3 changed files with 92 additions and 0 deletions
1
user_tools/config_mail.json
Normal file
1
user_tools/config_mail.json
Normal file
|
@ -0,0 +1 @@
|
|||
{"overleafdomain": "https://overleaf.neuro.uni-bremen.de"}
|
87
user_tools/get_activation_requests.py
Normal file
87
user_tools/get_activation_requests.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
import pymongo
|
||||
import datetime
|
||||
import os
|
||||
import pickle
|
||||
import json
|
||||
|
||||
|
||||
def get_activation_requests(remove_old_entries: bool = False) -> list[dict]:
    """Collect pending Overleaf account-activation requests from MongoDB.

    For every user that still owns a non-expired activation token, the newest
    such token is looked up and turned into a profile dict with the keys
    ``"email"`` and ``"url_string"`` (the ready-to-send activation URL built
    from the domain in ``config_mail.json``).

    Parameters
    ----------
    remove_old_entries:
        When True, tokens already handled by a previous run are skipped;
        the cut-off timestamp is checkpointed in ``last_run.pkl``.

    Returns
    -------
    list[dict]
        One profile per user with a pending activation token.
    """
    results: list[dict] = []

    checkpoint_filename = "last_run.pkl"

    # Expected to contain e.g. {"overleafdomain": "https://overleaf.example.org"}
    with open("config_mail.json", "r") as file:
        config = json.load(file)

    # Use an aware UTC timestamp: MongoDB stores datetimes in UTC, so a naive
    # local-time "now" would mis-filter expiresAt on hosts not running in UTC.
    # PyMongo converts aware datetimes to UTC for the query automatically.
    now = datetime.datetime.now(datetime.timezone.utc)

    client = pymongo.MongoClient("overleafmongo", 27017)
    db = client.sharelatex
    tokens = db.tokens

    # Load the checkpoint from the previous run (newest createdAt processed).
    continue_at_time = None
    if remove_old_entries and os.path.exists(checkpoint_filename):
        with open(checkpoint_filename, "rb") as file:
            continue_at_time = pickle.load(file)

    query: dict = {"expiresAt": {"$gt": now}}
    if continue_at_time is not None:
        query["createdAt"] = {"$gt": continue_at_time}

    # Find the newest createdAt among the matching tokens.
    newest = None
    for token in tokens.find(query):
        if newest is None or token["createdAt"] > newest:
            newest = token["createdAt"]

    # Freeze time: we don't want to mishandle items that are newer than this
    # check, so cap the query at `newest` — tokens created while we run are
    # left for the next invocation.
    if newest is not None:
        if "createdAt" in query:
            query["createdAt"] = {"$gt": continue_at_time, "$lte": newest}
        else:
            query["createdAt"] = {"$lte": newest}

    # Find unique user ids among the frozen result set.
    user_id_set = set()
    for token in tokens.find(query):
        user_id_set.add(token["data"]["user_id"])

    # Store the checkpoint — but only when something matched. The previous
    # version unconditionally pickled `newest`, so an empty run overwrote a
    # valid checkpoint with None and caused old tokens to be re-processed.
    if newest is not None:
        with open(checkpoint_filename, "wb") as file:
            pickle.dump(newest, file)

    for user_id in user_id_set:
        # Shallow copy is sufficient: the nested condition dicts are only read.
        new_query = query.copy()
        new_query["data.user_id"] = user_id

        # Pick the newest token belonging to this user.
        newest_entry = None
        object_id = None
        for token in tokens.find(new_query):
            if newest_entry is None or token["createdAt"] > newest_entry:
                newest_entry = token["createdAt"]
                object_id = token["_id"]

        if object_id is None:
            # Should not happen (the user id came from the same query), but
            # previously an empty dict was appended here, which would break
            # consumers expecting the "email" key — skip instead.
            continue

        dataset_found = tokens.find_one({"_id": object_id})
        extracted_user_id = dataset_found["data"]["user_id"]
        extracted_token = dataset_found["token"]
        profile = {
            "email": dataset_found["data"]["email"],
            "url_string": f"{config['overleafdomain']}/user/activate?token={extracted_token}&user_id={extracted_user_id}",
        }
        results.append(profile)

    return results
|
4
user_tools/prepare.sh
Normal file
4
user_tools/prepare.sh
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Provision the user-tools container: install the mc file manager and the
# Python dependencies needed by the scripts in this directory.
apk add mc
# One pip invocation resolves all three packages together (single resolver
# run) instead of three separate installs.
pip install docker pymongo email_validator
|
Loading…
Reference in a new issue