#!/usr/bin/python3

import asyncio
import base64
import binascii
import cryptography.hazmat.backends
import cryptography.hazmat.primitives.asymmetric.ec
import cryptography.hazmat.primitives.asymmetric.utils
import cryptography.hazmat.primitives.ciphers
import cryptography.hazmat.primitives.ciphers.aead
import cryptography.hazmat.primitives.hashes
import cryptography.hazmat.primitives.kdf.hkdf
import cryptography.hazmat.primitives.serialization
import datetime
import email.utils
import functools
import json
import ldap
import logging
import os
import pickle
import struct
import threading
import time
import urllib.parse

import tornado.locale

import sqlalchemy
from sqlalchemy import BigInteger, Boolean, Column, DateTime, ForeignKey, Integer
from sqlalchemy import Interval, LargeBinary, Text, UUID

from . import base
from . import bugtracker
from . import builds
from . import database
from . import httpclient
from . import jobs
from . import packages
from . import repos
from . import uploads

from .decorators import *

# Default storage quota for newly created users (in bytes)
DEFAULT_STORAGE_QUOTA = 256 * 1024 * 1024 # 256 MiB

# Setup logging
log = logging.getLogger("pbs.users")

# A list of LDAP attributes that we fetch
LDAP_ATTRS = (
	# UID
	"uid",

	# Common Name
	"cn",

	# First & Last Name
	"givenName", "sn",

	# Email Addresses
	"mail",
	"mailAlternateAddress",
)

class QuotaExceededError(Exception):
	"""
		Raised when an operation would exceed a user's storage quota
	"""

class Users(base.Object):
	"""
		The collection of all users.

		Users are primarily stored in LDAP; a row in the local "users" table
		is created lazily the first time a user is looked up.
	"""

	def init(self):
		# Initialize thread-local storage; the LDAP connection must not be
		# shared between threads
		self.local = threading.local()

	@property
	def ldap(self):
		"""
			A lazily-established, thread-local LDAP connection
		"""
		if not hasattr(self.local, "ldap"):
			# Fetch the LDAP URI
			ldap_uri = self.backend.config.get("ldap", "uri")

			log.debug("Connecting to %s..." % ldap_uri)

			# Establish LDAP connection
			self.local.ldap = ldap.initialize(ldap_uri)

		return self.local.ldap

	async def __aiter__(self):
		"""
			Iterates over all users that have not been deleted, ordered by name
		"""
		# NOTE(review): "async def __aiter__" returns a coroutine when called,
		# which "async for" no longer accepts since Python 3.8 — confirm how
		# call sites consume this (e.g. by awaiting it first).
		users = await self._get_users("""
			SELECT
				*
			FROM
				users
			WHERE
				deleted_at IS NULL
			ORDER BY
				name
			""",
		)

		return aiter(users)

	def _ldap_query(self, query, attrlist=None, limit=0, search_base=None):
		"""
			Performs a paged LDAP search and returns a list with the
			attributes of all matching entries (without the DN).

			search_base defaults to the configured base DN.
		"""
		# Fall back to the configured search base
		# (previously the argument was unconditionally overwritten)
		if search_base is None:
			search_base = self.backend.config.get("ldap", "base")

		# python-ldap requires an integer size limit
		if limit is None:
			limit = 0

		log.debug("Performing LDAP query (%s): %s" % (search_base, query))

		t = time.time()

		# Ask for up to 512 results being returned at a time
		page_control = ldap.controls.SimplePagedResultsControl(True, size=512, cookie="")

		results = []
		pages = 0

		# Perform the search
		while True:
			response = self.ldap.search_ext(search_base,
				ldap.SCOPE_SUBTREE, query, attrlist=attrlist, sizelimit=limit,
				serverctrls=[page_control],
			)

			# Fetch all results ("rtype" so we don't shadow the builtin "type")
			rtype, data, rmsgid, serverctrls = self.ldap.result3(response)

			# Append to local copy
			results += data
			pages += 1

			# Find the paged results control in the response
			controls = [c for c in serverctrls
				if c.controlType == ldap.controls.SimplePagedResultsControl.controlType]

			if not controls:
				break

			# Set the cookie for more results
			page_control.cookie = controls[0].cookie

			# There are no more results
			if not page_control.cookie:
				break

		# Log time it took to perform the query
		# (moved out of the loop so that it is logged exactly once)
		log.debug("Query took %.2fms (%s page(s))" % ((time.time() - t) * 1000.0, pages))

		# Return all attributes (without the DN)
		return [attrs for dn, attrs in results]

	def _ldap_get(self, *args, **kwargs):
		"""
			Like _ldap_query(), but expects at most one result.

			Returns an empty dictionary if nothing was found and raises
			OverflowError if the query matched more than one entry.
		"""
		results = self._ldap_query(*args, **kwargs)

		# No result
		if not results:
			return {}

		# Too many results?
		elif len(results) > 1:
			raise OverflowError("Too many results returned for ldap_get()")

		return results[0]

	async def create(self, name, notify=False, storage_quota=None):
		"""
			Creates a new user

			If notify is True, a welcome email is sent to the new user.
		"""
		# Set default for storage quota
		if storage_quota is None:
			storage_quota = DEFAULT_STORAGE_QUOTA

		# Insert into database
		user = await self.db.insert(
			User,
			name          = name,
			storage_quota = storage_quota,
		)

		log.debug("Created user %s" % user)

		# Send a welcome email
		if notify:
			await user._send_welcome_email()

		return user

	async def get_by_name(self, name):
		"""
			Fetch a user by its username.

			Falls back to LDAP and creates the local user row if it does not
			exist yet. Returns None if the user cannot be found at all.
		"""
		stmt = (
			sqlalchemy
			.select(User)
			.where(
				User.deleted_at == None,
				User.name == name,
			)
		)

		# Fetch the user from the database
		user = await self.db.fetch_one(stmt)
		if user:
			return user

		# Do nothing in test mode
		if self.backend.test:
			log.warning("Cannot use get_by_name test mode")
			return

		# Search in LDAP
		res = self._ldap_get(
			"(&"
				"(objectClass=person)"
				"(uid=%s)"
			")" % name,
			attrlist=("uid",),
		)
		if not res:
			return

		# Fetch the UID (LDAP attribute values are bytes)
		uid = res.get("uid")[0].decode()

		# Create a new user
		return await self.create(uid)

	async def get_by_email(self, mail):
		"""
			Fetch a user by one of its email addresses (primary or alternate)
		"""
		# Strip any excess stuff from the email address
		name, mail = email.utils.parseaddr(mail)

		# Do nothing in test mode
		if self.backend.test:
			log.warning("Cannot use get_by_email in test mode")
			return

		# Search in LDAP
		try:
			res = self._ldap_get(
				"(&"
					"(objectClass=person)"
					"(|"
						"(mail=%s)"
						"(mailAlternateAddress=%s)"
					")"
				")" % (mail, mail),
				attrlist=("uid",),
			)

		except OverflowError as e:
			raise OverflowError("Too many results for search for %s" % mail) from e

		# No results
		if not res:
			return

		# Fetch the UID
		uid = res.get("uid")[0].decode()

		return await self.get_by_name(uid)

	async def _search_by_email(self, mails, include_missing=True):
		"""
			Takes a list of email addresses and returns all users that could be found
		"""
		users = []

		for mail in mails:
			user = await self.get_by_email(mail)

			# Include the search string if no user could be found
			if not user and include_missing:
				user = mail

			# Skip any duplicates
			if user in users:
				continue

			users.append(user)

		return users

	async def search(self, q, limit=None):
		"""
			Searches for users matching q by username, common name or
			email address
		"""
		# Do nothing in test mode
		if self.backend.test:
			log.warning("Cannot search for users in test mode")
			return []

		# Search for an exact match
		user = await self.get_by_name(q)
		if user:
			return [user]

		res = self._ldap_query(
			"(&"
				"(objectClass=person)"
				"(|"
					"(uid=%s)"
					"(cn=*%s*)"
					"(mail=%s)"
					"(mailAlternateAddress=%s)"
				")"
			")" % (q, q, q, q),
			attrlist=("uid",),
			limit=limit,
		)

		# Fetch users
		stmt = (
			sqlalchemy
			.select(User)
			.where(
				User.deleted_at == None,

				# Must use in_() here; the plain Python "in" operator cannot
				# build a SQL expression and raises TypeError
				User.name.in_([row.get("uid")[0].decode() for row in res]),
			)
			.order_by(
				User.name,
			)
		)

		# Return as list
		return await self.db.fetch_as_list(stmt)

	@functools.cached_property
	def build_counts(self):
		"""
			Returns a CTE that maps the user ID and the total number of builds
		"""
		return (
			sqlalchemy
			.select(
				# User ID
				builds.Build.owner_id.label("user_id"),

				# Count all builds
				sqlalchemy.func.count(
					builds.Build.id
				).label("count"),
			)
			.where(
				builds.Build.owner_id != None,
				builds.Build.test == False,
			)
			.group_by(
				builds.Build.owner_id,
			)
			.cte("build_counts")
		)

	async def get_top(self, limit=50):
		"""
			Returns the top users (with the most builds)
		"""
		stmt = (
			sqlalchemy
			.select(User)
			.join(
				self.build_counts,
				self.build_counts.c.user_id == User.id,
			)
			.where(
				User.deleted_at == None,
			)
			.order_by(
				self.build_counts.c.count.desc(),
			)
			# Use the limit argument (it used to be hardcoded to 50)
			.limit(limit)
		)

		# Run the query
		return await self.db.fetch_as_list(stmt)

	@functools.cached_property
	def build_times(self):
		"""
			This is a CTE to easily access a user's consumed build time in the last 24 hours
		"""
		return (
			sqlalchemy

			.select(
				# Fetch the user by its ID
				User.id.label("user_id"),

				# Sum up the total build time; running jobs count up to "now"
				sqlalchemy.func.sum(
					sqlalchemy.func.coalesce(
						jobs.Job.finished_at,
						sqlalchemy.func.current_timestamp()
					)
					- jobs.Job.started_at,
				).label("used_build_time"),
			)

			# Join builds & jobs
			.join(
				builds.Build,
				builds.Build.owner_id == User.id,
			)
			.join(
				jobs.Job,
				jobs.Job.build_id == builds.Build.id,
			)

			# Filter out some things
			.where(
				User.deleted_at == None,
				User.daily_build_quota != None,

				# Jobs must have been started
				jobs.Job.started_at != None,

				# Only consider jobs that are still running or that have
				# finished within the last 24 hours
				sqlalchemy.or_(
					jobs.Job.finished_at == None,
					jobs.Job.finished_at >=
						sqlalchemy.func.current_timestamp() - datetime.timedelta(hours=24),
				),
			)

			# Group by user
			.group_by(
				User.id,
			)

			# Make this into a CTE
			.cte("user_build_times")
		)

	@functools.cached_property
	def exceeded_quotas(self):
		"""
			A CTE with all users that have exceeded their daily build quota
		"""
		return (
			sqlalchemy

			.select(
				User.id,
				self.build_times.c.used_build_time,
			)
			.where(
				#User.daily_build_quota != None,
				self.build_times.c.used_build_time >= User.daily_build_quota,
			)

			# Make this into a CTE
			.cte("user_exceeded_quotas")
		)

	# Push Notifications

	@property
	def vapid_public_key(self):
		"""
			The public part of the VAPID key
		"""
		return self.backend.config.get("vapid", "public-key")

	@property
	def vapid_private_key(self):
		"""
			The private part of the VAPID key
		"""
		return self.backend.config.get("vapid", "private-key")

	@functools.cache
	def get_application_server_key(self):
		"""
			Generates the key that we are sending to the client
			(the URL-safe base64-encoded uncompressed P-256 public key)
		"""
		lines = []

		# Strip the PEM armour ("-----BEGIN/END ...-----" lines)
		for line in self.vapid_public_key.splitlines():
			if line.startswith("-"):
				continue

			lines.append(line)

		# Join everything together
		key = "".join(lines)

		# Decode the key
		key = base64.b64decode(key)

		# Only take the last bit (the 65-byte uncompressed EC point)
		key = key[-65:]

		# Encode the key URL-safe and drop the padding
		key = base64.urlsafe_b64encode(key).strip(b"=")

		# Return as string
		return key.decode()


class User(database.Base, database.BackendMixin, database.SoftDeleteMixin):
	"""
		A single user.

		Most identity information (name, email addresses) lives in LDAP;
		this row stores local state such as quotas and API keys.
	"""

	__tablename__ = "users"

	def __str__(self):
		return self.realname or self.name

	def __hash__(self):
		return hash(self.id)

	def __lt__(self, other):
		# Sort users by name; plain strings may be compared against, too
		if isinstance(other, self.__class__):
			return self.name < other.name

		elif isinstance(other, str):
			return self.name < other

		return NotImplemented

	def to_json(self):
		return {
			"name"     : self.name,
		}

	# ID

	id = Column(Integer, primary_key=True)

	# Name

	name = Column(Text, nullable=False)

	# Link

	@property
	def link(self):
		return "/users/%s" % self.name

	async def delete(self):
		"""
			Soft-deletes this user and destroys all of their sessions
		"""
		# NOTE(review): This sets "deleted" although the queries in this file
		# filter on "deleted_at" (SoftDeleteMixin) — confirm _set_attribute()
		# maps this correctly.
		await self._set_attribute("deleted", True)

		# Destroy all sessions
		# NOTE(review): destroy() is not awaited — confirm it is synchronous
		for session in self.sessions:
			session.destroy()

	# Fetch any attributes from LDAP

	@functools.cached_property
	def attrs(self):
		"""
			All LDAP attributes of this user (cached for the object's lifetime)
		"""
		# Use the stored attributes (only used in the test environment)
		#if self.data._attrs:
		#	return pickle.loads(self.data._attrs)
		#
		return self.backend.users._ldap_get("(uid=%s)" % self.name, attrlist=LDAP_ATTRS)

	def _get_attrs(self, key):
		# LDAP attribute values are bytes and may have multiple values
		return [v.decode() for v in self.attrs.get(key, [])]

	def _get_attr(self, key):
		# Returns the first value of the attribute (or None)
		for value in self._get_attrs(key):
			return value

	# Realname

	@property
	def realname(self):
		return self._get_attr("cn") or ""

	@property
	def email(self):
		"""
			The primary email address
		"""
		return self._get_attr("mail")

	@property
	def email_to(self):
		"""
			The name/email address of the user in MIME format
		"""
		return email.utils.formataddr((
			self.realname or self.name,
			self.email or "invalid@invalid.tld",
		))

	async def send_email(self, *args, **kwargs):
		"""
			Sends a templated email to this user in their locale
		"""
		return await self.backend.messages.send_template(
			*args,
			recipient=self,
			locale=self.locale,
			**kwargs,
		)

	async def _send_welcome_email(self):
		"""
			Sends a welcome email to the user
		"""
		await self.send_email("users/messages/welcome.txt")

	# Admin

	admin = Column(Boolean, nullable=False, default=False)

	# Admin?

	def is_admin(self):
		return self.admin is True

	# Locale

	@property
	def locale(self):
		return tornado.locale.get()

	# Avatar

	def avatar(self, size=512):
		"""
			Returns a URL to the avatar the user has uploaded
		"""
		return "https://people.ipfire.org/users/%s.jpg?size=%s" % (self.name, size)

	# Permissions

	def has_perm(self, user):
		"""
			Check, if the given user has the right to perform administrative
			operations on this user.
		"""
		# Anonymous people have no permission
		if user is None:
			return False

		# Admins always have permission
		if user.is_admin():
			return True

		# Users can edit themselves
		if user == self:
			return True

		# No permission
		return False

	# Sessions

	sessions = sqlalchemy.orm.relationship("Session", back_populates="user")

	# Bugzilla API Key

	bugzilla_api_key = Column(Text)

	# Bugzilla

	async def connect_to_bugzilla(self, api_key):
		"""
			Stores the Bugzilla API key after verifying that it actually
			belongs to this user.

			Raises ValueError if the key belongs to somebody else.
		"""
		bz = bugtracker.Bugzilla(self.backend, api_key)

		# Does the API key match with this user?
		if not self.email == await bz.whoami():
			raise ValueError("The API key does not belong to %s" % self)

		# Store the API key
		self.bugzilla_api_key = api_key

	@functools.cached_property
	def bugzilla(self):
		"""
			Connection to Bugzilla as this user
		"""
		if self.bugzilla_api_key:
			return bugtracker.Bugzilla(self.backend, self.bugzilla_api_key)

	# Build Quota

	daily_build_quota = Column(Interval)

	# Build Times

	async def get_used_daily_build_quota(self):
		"""
			Returns the build time consumed in the last 24 hours
		"""
		# Fetch the build time from the CTE
		stmt = (
			sqlalchemy
			.select(
				self.backend.users.build_times.c.used_build_time,
			)
			.where(
				self.backend.users.build_times.c.user_id == self.id,
			)
		)

		# Fetch the result
		return await self.db.select_one(stmt, "used_build_time") or datetime.timedelta(0)

	async def has_exceeded_build_quota(self):
		"""
			Returns True if the user has used up their daily build quota
		"""
		if not self.daily_build_quota:
			return False

		return await self.get_used_daily_build_quota() >= self.daily_build_quota

	# Storage Quota

	storage_quota = Column(BigInteger)

	async def has_exceeded_storage_quota(self, size=None):
		"""
			Returns True if this user has exceeded their quota
			(optionally after adding size more bytes)
		"""
		# Skip quota check if this user has no quota
		if not self.storage_quota:
			return False

		return await self.get_disk_usage() + (size or 0) >= self.storage_quota

	async def check_storage_quota(self, size=None):
		"""
			Determines the user's disk usage
			and raises QuotaExceededError when the user is over quota.
		"""
		# The coroutine must be awaited; previously the unawaited coroutine
		# object was always truthy, so this raised for every user
		if await self.has_exceeded_storage_quota(size=size):
			raise QuotaExceededError

	async def get_disk_usage(self):
		"""
			Returns the total disk usage of this user
			(uploads + source packages + binary packages + build logs)
		"""
		source_packages = sqlalchemy.orm.aliased(packages.Package)
		binary_packages = sqlalchemy.orm.aliased(packages.Package)

		# Uploads
		upload_disk_usage = (
			sqlalchemy
			.select(
				uploads.Upload.size
			)
			.where(
				uploads.Upload.user == self,
				uploads.Upload.expires_at > sqlalchemy.func.current_timestamp(),
			)
		)

		# Source Packages
		source_package_disk_usage = (
			sqlalchemy
			.select(
				source_packages.filesize
			)
			.select_from(
				builds.Build,
			)
			.join(
				source_packages,
				source_packages.id == builds.Build.pkg_id,
			)
			.where(
				# All objects must exist
				source_packages.deleted_at == None,
				builds.Build.deleted_at == None,

				# Don't consider test builds
				builds.Build.test == False,

				# The build must be owned by the user
				builds.Build.owner == self,
			)
		)

		# Binary Packages
		binary_package_disk_usage = (
			sqlalchemy
			.select(
				binary_packages.filesize,
			)
			.select_from(
				builds.Build,
			)
			.join(
				jobs.Job,
				jobs.Job.build_id == builds.Build.id,
			)
			.join(
				jobs.JobPackage,
				jobs.JobPackage.job_id == jobs.Job.id,
			)
			.join(
				binary_packages,
				binary_packages.id == jobs.JobPackage.pkg_id,
			)
			.where(
				# All objects must exist
				binary_packages.deleted_at == None,
				builds.Build.deleted_at == None,
				jobs.Job.deleted_at == None,

				# Don't consider test builds
				builds.Build.test == False,

				# The build must be owned by the user
				builds.Build.owner == self,
			)
		)

		# Build Logs
		build_log_disk_usage = (
			sqlalchemy
			.select(
				jobs.Job.log_size
			)
			.select_from(
				builds.Build,
			)
			.join(
				jobs.Job,
				jobs.Job.build_id == builds.Build.id,
			)
			.where(
				# All objects must exist
				builds.Build.deleted_at == None,
				jobs.Job.deleted_at == None,

				# Don't consider test builds
				builds.Build.test == False,

				# The build must be owned by the user
				builds.Build.owner == self,
			)
		)

		# Pull everything together
		# (the union's column is named after the first select, i.e. "size")
		disk_usage = (
			sqlalchemy
			.union_all(
				upload_disk_usage,
				source_package_disk_usage,
				binary_package_disk_usage,
				build_log_disk_usage,
			)
			.cte("disk_usage")
		)

		# Add it all up
		stmt = (
			sqlalchemy
			.select(
				sqlalchemy.func.sum(
					disk_usage.c.size
				).label("disk_usage"),
			)
		)

		# Run the query
		return await self.db.select_one(stmt, "disk_usage") or 0

	# Stats

	async def get_total_builds(self):
		"""
			Returns the total number of (non-test) builds owned by this user
		"""
		stmt = (
			sqlalchemy
			.select(
				self.backend.users.build_counts.c.count.label("count"),
			)
			.select_from(self.backend.users.build_counts)
			.where(
				self.backend.users.build_counts.c.user_id == self.id,
			)
		)

		# Run the query
		return await self.db.select_one(stmt, "count") or 0

	async def get_total_build_time(self):
		"""
			Returns the total build time
		"""
		stmt = (
			sqlalchemy
			.select(
				sqlalchemy.func.sum(
					sqlalchemy.func.coalesce(
						jobs.Job.finished_at,
						sqlalchemy.func.current_timestamp()
					)
					- jobs.Job.started_at,
				).label("total_build_time")
			)
			.join(
				builds.Build,
				builds.Build.id == jobs.Job.build_id,
			)
			.where(
				jobs.Job.started_at != None,
				builds.Build.owner == self,
			)
		)

		return await self.db.select_one(stmt, "total_build_time")

	# Custom repositories

	async def get_repos(self, distro=None):
		"""
			Returns all custom repositories
			(optionally filtered by distribution)
		"""
		stmt = (
			sqlalchemy
			.select(repos.Repo)
			.where(
				repos.Repo.deleted_at == None,
				repos.Repo.owner == self,
			)
			.order_by(
				repos.Repo.name,
			)
		)

		# Filter by distribution
		if distro:
			stmt = stmt.where(
				repos.Repo.distro == distro,
			)

		return await self.db.fetch_as_list(stmt)

	async def get_repo(self, distro, slug=None):
		"""
			Fetches a single repository
		"""
		# Return the "home" repository if slug is empty
		if slug is None:
			slug = self.name

		stmt = (
			sqlalchemy
			.select(repos.Repo)
			.where(
				repos.Repo.deleted_at == None,
				repos.Repo.owner == self,
				repos.Repo.distro == distro,
				repos.Repo.slug == slug,
			)
		)

		return await self.db.fetch_one(stmt)

	# Uploads

	def get_uploads(self):
		"""
			Returns all uploads that belong to this user
		"""
		stmt = (
			sqlalchemy
			.select(uploads.Upload)
			.where(
				uploads.Upload.user == self,
				uploads.Upload.expires_at > sqlalchemy.func.current_timestamp(),
			)
			.order_by(
				uploads.Upload.created_at.desc(),
			)
		)

		return self.db.fetch(stmt)

	# Push Subscriptions

	async def is_subscribed(self):
		"""
			Returns True if the user is subscribed.
		"""
		subscriptions = await self.get_subscriptions()

		return bool(subscriptions)

	async def get_subscriptions(self):
		"""
			Fetches all current subscriptions
		"""
		stmt = (
			sqlalchemy
			.select(
				UserPushSubscription,
			)
			.where(
				UserPushSubscription.user == self,
			)
			.order_by(
				UserPushSubscription.created_at.asc(),
			)
		)

		return await self.db.fetch_as_list(stmt)

	async def subscribe(self, endpoint, p256dh, auth, user_agent=None):
		"""
			Creates a new subscription for this user
		"""
		_ = self.locale.translate

		# Decode p256dh (pad to a multiple of four before decoding)
		if not isinstance(p256dh, bytes):
			p256dh = base64.urlsafe_b64decode(p256dh + "==")

		# Decode auth
		if not isinstance(auth, bytes):
			auth = base64.urlsafe_b64decode(auth + "==")

		# Insert into the database
		subscription = await self.db.insert(
			UserPushSubscription,
			user       = self,
			user_agent = user_agent,
			endpoint   = endpoint,
			p256dh     = p256dh,
			auth       = auth,
		)

		# Log action
		log.info("%s subscribed to push notifications" % self)

		# Send a message
		await subscription.send(
			_("Hello, %s!") % self,
			_("You have successfully subscribed to push notifications."),
		)

		return subscription

	async def send_push_message(self, *args, **kwargs):
		"""
			Sends a message to all active subscriptions

			Returns True if there was at least one subscription to send to.
		"""
		subscriptions = await self.get_subscriptions()

		# Return early if there are no subscriptions
		if not subscriptions:
			return False

		# Send the message to all subscriptions
		for subscription in subscriptions:
			await subscription.send(*args, **kwargs)

		return True


class UserPushSubscription(database.Base, database.BackendMixin):
	"""
		A Web Push subscription of a user.

		Implements message encryption (RFC 8291 / RFC 8188, "aes128gcm")
		and VAPID request signing (RFC 8292).
	"""

	__tablename__ = "user_push_subscriptions"

	# ID

	id = Column(Integer, primary_key=True)

	# User ID

	user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

	# User

	user = sqlalchemy.orm.relationship("User", lazy="joined", innerjoin=True)

	# UUID

	uuid = Column(UUID, unique=True, nullable=False,
		server_default=sqlalchemy.func.gen_random_uuid())

	# Created At

	created_at = Column(DateTime(timezone=False), nullable=False,
		server_default=sqlalchemy.func.current_timestamp())

	# User Agent

	user_agent = Column(Text)

	# Endpoint

	endpoint = Column(Text, nullable=False)

	# P256DH (the client's public key)

	p256dh = Column(LargeBinary, nullable=False)

	# Auth (the client's authentication secret)

	auth = Column(LargeBinary, nullable=False)

	@property
	def vapid_private_key(self):
		"""
			The VAPID private key as a loaded key object
		"""
		return cryptography.hazmat.primitives.serialization.load_pem_private_key(
			self.backend.users.vapid_private_key.encode(),
			password=None,
			backend=cryptography.hazmat.backends.default_backend(),
		)

	@property
	def vapid_public_key(self):
		return self.vapid_private_key.public_key()

	async def send(self, title, body, ttl=None):
		"""
			Sends a message to the user using the push service
		"""
		message = {
			"title" : title,
			"body"  : body,
		}

		# Convert dict() to JSON
		message = json.dumps(message)

		# Encrypt the message
		message = self._encrypt(message)

		# Create a signature
		signature = self._sign()

		# Encode the public key (uncompressed point, URL-safe base64)
		crypto_key = self.b64encode(
			self.vapid_public_key.public_bytes(
				cryptography.hazmat.primitives.serialization.Encoding.X962,
				cryptography.hazmat.primitives.serialization.PublicFormat.UncompressedPoint,
			)
		).decode()

		# Form request headers
		headers = {
			"Authorization"    : "WebPush %s" % signature,
			"Crypto-Key"       : "p256ecdsa=%s" % crypto_key,

			"Content-Type"      : "application/octet-stream",
			"Content-Encoding"  : "aes128gcm",
			"TTL"               : "%s" % (ttl or 0),
		}

		# Send the request
		try:
			await self.backend.httpclient.fetch(self.endpoint, method="POST",
				headers=headers, body=message)

		except httpclient.HTTPError as e:
			# 410 - Gone
			# The subscription is no longer valid
			if e.code == 410:
				# Let's just delete ourselves
				await self.delete()
				return

			# Re-raise everything else (keeping the original traceback)
			raise

	async def delete(self):
		"""
			Deletes this subscription
		"""
		# Immediately delete it
		await self.db.delete(self)

	def _sign(self):
		"""
			Creates the signed JWT for the VAPID Authorization header
		"""
		elements = []

		for element in (self._jwt_info, self._jwt_data):
			# Format the dictionary
			element = json.dumps(element, separators=(',', ':'), sort_keys=True)

			# Encode to bytes
			element = element.encode()

			# Encode URL-safe in base64 and remove any padding
			element = self.b64encode(element)

			elements.append(element)

		# Concatenate
		token = b".".join(elements)

		log.debug("String to sign: %s" % token)

		# Create the signature
		signature = self.vapid_private_key.sign(
			token,
			cryptography.hazmat.primitives.asymmetric.ec.ECDSA(
				cryptography.hazmat.primitives.hashes.SHA256(),
			),
		)

		# Decode the DER signature into its raw (r, s) components
		r, s = cryptography.hazmat.primitives.asymmetric.utils.decode_dss_signature(signature)

		# Encode the signature in base64 (raw r || s, 32 bytes each)
		signature = self.b64encode(
			self._num_to_bytes(r, 32) + self._num_to_bytes(s, 32),
		)

		# Put everything together
		signature = b"%s.%s" % (token, signature)
		signature = signature.decode()

		log.debug("Created signature: %s" % signature)

		return signature

	# Static JWT header
	_jwt_info = {
		"typ" : "JWT",
		"alg" : "ES256",
	}

	@property
	def _jwt_data(self):
		# Parse the URL (the audience is the push service's origin)
		url = urllib.parse.urlparse(self.endpoint)

		# Let the signature expire after 12 hours
		expires = time.time() + (12 * 3600)

		return {
			"aud" : "%s://%s" % (url.scheme, url.netloc),
			"exp" : int(expires),
			"sub" : "mailto:info@ipfire.org",
		}

	@staticmethod
	def _num_to_bytes(n, pad_to):
		"""
			Returns the byte representation of an integer, in big-endian order.
		"""
		h = "%x" % n

		r = binascii.unhexlify("0" * (len(h) % 2) + h)
		return b"\x00" * (pad_to - len(r)) + r

	@staticmethod
	def _serialize_key(key):
		"""
			Serializes a private key as PKCS8/DER or a public key as an
			uncompressed EC point
		"""
		if isinstance(key, cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey):
			return key.private_bytes(
				cryptography.hazmat.primitives.serialization.Encoding.DER,
				cryptography.hazmat.primitives.serialization.PrivateFormat.PKCS8,
				cryptography.hazmat.primitives.serialization.NoEncryption(),
			)

		return key.public_bytes(
			cryptography.hazmat.primitives.serialization.Encoding.X962,
			cryptography.hazmat.primitives.serialization.PublicFormat.UncompressedPoint,
		)

	@staticmethod
	def b64encode(data):
		"""
			URL-safe base64 without padding
		"""
		return base64.urlsafe_b64encode(data).strip(b"=")

	def _encrypt(self, message):
		"""
			Encrypts the message for this subscription as an "aes128gcm"
			content coding (RFC 8291 key derivation, RFC 8188 framing)
		"""
		# Encode everything as bytes
		if not isinstance(message, bytes):
			message = message.encode()

		# Generate some salt
		salt = os.urandom(16)

		record_size = 4096

		# Each record carries a 1-byte delimiter and a 16-byte GCM tag
		chunk_size = record_size - 17

		# The client's public key
		p256dh = cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.from_encoded_point(
			cryptography.hazmat.primitives.asymmetric.ec.SECP256R1(), bytes(self.p256dh),
		)

		# Generate an ephemeral server key
		server_private_key = cryptography.hazmat.primitives.asymmetric.ec.generate_private_key(
			cryptography.hazmat.primitives.asymmetric.ec.SECP256R1,
			cryptography.hazmat.backends.default_backend(),
		)
		server_public_key = server_private_key.public_key()

		context = b"WebPush: info\x00"

		# Serialize the client's public key
		context += p256dh.public_bytes(
			cryptography.hazmat.primitives.serialization.Encoding.X962,
			cryptography.hazmat.primitives.serialization.PublicFormat.UncompressedPoint,
		)

		# Serialize the server's public key
		context += server_public_key.public_bytes(
			cryptography.hazmat.primitives.serialization.Encoding.X962,
			cryptography.hazmat.primitives.serialization.PublicFormat.UncompressedPoint,
		)

		# Perform key derivation with ECDH
		secret = server_private_key.exchange(
			cryptography.hazmat.primitives.asymmetric.ec.ECDH(), p256dh,
		)

		# Derive the IKM using the client's auth secret
		hkdf_auth = cryptography.hazmat.primitives.kdf.hkdf.HKDF(
			algorithm=cryptography.hazmat.primitives.hashes.SHA256(),
			length=32,
			salt=self.auth,
			info=context,
			backend=cryptography.hazmat.backends.default_backend(),
		)
		secret = hkdf_auth.derive(secret)

		# Derive the content encryption key
		hkdf_key = cryptography.hazmat.primitives.kdf.hkdf.HKDF(
			algorithm=cryptography.hazmat.primitives.hashes.SHA256(),
			length=16,
			salt=salt,
			info=b"Content-Encoding: aes128gcm\x00",
			backend=cryptography.hazmat.backends.default_backend(),
		)
		encryption_key = hkdf_key.derive(secret)

		# Derive a nonce
		hkdf_nonce = cryptography.hazmat.primitives.kdf.hkdf.HKDF(
			algorithm=cryptography.hazmat.primitives.hashes.SHA256(),
			length=12,
			salt=salt,
			info=b"Content-Encoding: nonce\x00",
			backend=cryptography.hazmat.backends.default_backend(),
		)
		nonce = hkdf_nonce.derive(secret)

		result = b""
		chunks = 0

		# Encrypt the message in records of (up to) chunk_size bytes.
		# At least one record is always produced so that even an empty
		# message carries the final-record delimiter required by RFC 8188.
		while True:
			# Fetch a chunk
			chunk, message = message[:chunk_size], message[chunk_size:]

			# Is this the last chunk?
			last = not message

			# Encrypt the chunk
			result += self._encrypt_chunk(encryption_key, nonce, chunks, chunk, last)

			# Keep counting...
			chunks += 1

			if last:
				break

		# Fetch the public key
		key_id = server_public_key.public_bytes(
			cryptography.hazmat.primitives.serialization.Encoding.X962,
			cryptography.hazmat.primitives.serialization.PublicFormat.UncompressedPoint,
		)

		# Join the entire message together:
		# salt (16) | record size (uint32) | key id length (uint8) | key id | records
		message = [
			salt,
			struct.pack("!L", record_size),
			struct.pack("!B", len(key_id)),
			key_id,
			result,
		]

		return b"".join(message)

	def _encrypt_chunk(self, key, nonce, counter, chunk, last=False):
		"""
			Encrypts one chunk
		"""
		# Make the IV
		iv = self._make_iv(nonce, counter)

		log.debug("Encrypting chunk %s: length = %s" % (counter + 1, len(chunk)))

		# Append the record delimiter (0x02 marks the final record)
		if last:
			chunk += b"\x02"
		else:
			chunk += b"\x01"

		# Setup AES GCM
		cipher = cryptography.hazmat.primitives.ciphers.Cipher(
			cryptography.hazmat.primitives.ciphers.algorithms.AES128(key),
			cryptography.hazmat.primitives.ciphers.modes.GCM(iv),
			backend=cryptography.hazmat.backends.default_backend(),
		)

		# Get the encryptor
		encryptor = cipher.encryptor()

		# Encrypt the chunk
		chunk = encryptor.update(chunk)

		# Finalize this round
		chunk += encryptor.finalize() + encryptor.tag

		return chunk

	@staticmethod
	def _make_iv(base, counter):
		# XOR the record counter into the low 8 bytes of the 12-byte nonce
		mask, = struct.unpack("!Q", base[4:])

		return base[:4] + struct.pack("!Q", counter ^ mask)
