# backend/uploads.py — Pakfire Build Service (pbs), mirrored from git.ipfire.org
# NOTE(review): this text is a partial gitweb blob extraction; many source
# lines are elided, so most definitions below are incomplete fragments.
3 from __future__
import division
12 import pakfire
.packages
18 from constants
import *
20 class Uploads(base
.Object
):
21 def get_by_uuid(self
, _uuid
):
22 upload
= self
.db
.get("SELECT id FROM uploads WHERE uuid = %s", _uuid
)
24 return Upload(self
.pakfire
, upload
.id)
27 uploads
= self
.db
.query("SELECT id FROM uploads ORDER BY time_started DESC")
29 return [Upload(self
.pakfire
, u
.id) for u
in uploads
]
32 for upload
in self
.get_all():
36 class Upload(base
.Object
):
37 def __init__(self
, pakfire
, id):
38 base
.Object
.__init
__(self
, pakfire
)
41 self
.data
= self
.db
.get("SELECT * FROM uploads WHERE id = %s", self
.id)
44 def create(cls
, pakfire
, filename
, size
, hash, builder
=None, user
=None):
45 assert builder
or user
47 id = pakfire
.db
.execute("INSERT INTO uploads(uuid, filename, size, hash) \
48 VALUES(%s, %s, %s, %s)", "%s" % uuid
.uuid4(), filename
, size
, hash)
51 pakfire
.db
.execute("UPDATE uploads SET builder_id = %s WHERE id = %s",
55 pakfire
.db
.execute("UPDATE uploads SET user_id = %s WHERE id = %s",
58 upload
= cls(pakfire
, id)
60 # Create space to where we save the data.
61 dirname
= os
.path
.dirname(upload
.path
)
62 if not os
.path
.exists(dirname
):
66 f
= open(upload
.path
, "w")
81 return self
.data
.filename
85 return os
.path
.join(UPLOADS_DIR
, self
.uuid
, self
.filename
)
93 return self
.data
.progress
/ self
.size
97 if self
.data
.builder_id
:
98 return self
.pakfire
.builders
.get_by_id(self
.data
.builder_id
)
102 if self
.data
.user_id
:
103 return self
.pakfire
.users
.get_by_id(self
.data
.user_id
)
105 def append(self
, data
):
106 # Check if the filesize was exceeded.
107 size
= os
.path
.getsize(self
.path
) + len(data
)
108 if size
> self
.data
.size
:
109 raise Exception, "Given filesize was exceeded for upload %s" % self
.uuid
111 logging
.debug("Writing %s bytes to %s" % (len(data
), self
.path
))
113 f
= open(self
.path
, "ab")
117 self
.db
.execute("UPDATE uploads SET progress = %s WHERE id = %s",
121 size
= os
.path
.getsize(self
.path
)
122 if not size
== self
.data
.size
:
123 logging
.error("Filesize is not okay: %s" % (self
.uuid
))
126 # Calculate a hash to validate the upload.
127 hash = misc
.calc_hash1(self
.path
)
129 if not self
.hash == hash:
130 logging
.error("Hash did not match: %s != %s" % (self
.hash, hash))
137 Update the status of the upload in the database to "finished".
139 # Check if the file was completely uploaded and the hash is correct.
140 # If not, the upload has failed.
141 if not self
.validate():
144 self
.db
.execute("UPDATE uploads SET finished = 'Y', time_finished = NOW() \
145 WHERE id = %s", self
.id)
150 # Remove the uploaded data.
151 path
= os
.path
.dirname(self
.path
)
152 if os
.path
.exists(path
):
153 shutil
.rmtree(path
, ignore_errors
=True)
155 # Delete the upload from the database.
156 self
.db
.execute("DELETE FROM uploads WHERE id = %s", self
.id)
159 def time_started(self
):
160 return self
.data
.time_started
163 def time_running(self
):
164 # Get the seconds since we are running.
166 time_running
= datetime
.datetime
.utcnow() - self
.time_started
167 time_running
= time_running
.total_seconds()
175 if not self
.time_running
:
178 return self
.data
.progress
/ self
.time_running
181 # Remove uploads that are older than 2 hours.
182 if self
.time_running
>= 3600 * 2: