]>
git.ipfire.org Git - people/jschlag/pbs.git/blob - src/buildservice/uploads.py
3 from __future__
import division
11 import pakfire
.packages
15 from . import packages
18 from .constants
import *
19 from .decorators
import *
21 class Uploads(base
.Object
):
22 def _get_upload(self
, query
, *args
):
23 res
= self
.db
.get(query
, *args
)
26 return Upload(self
.backend
, res
.id, data
=res
)
28 def _get_uploads(self
, query
, *args
):
29 res
= self
.db
.query(query
, *args
)
32 yield Upload(self
.backend
, row
.id, data
=row
)
35 uploads
= self
._get
_uploads
("SELECT * FROM uploads ORDER BY time_started DESC")
def get_by_uuid(self, uuid):
	"""Fetch the upload row identified by the given UUID from the database."""
	query = "SELECT * FROM uploads WHERE uuid = %s"
	return self._get_upload(query, uuid)
42 def create(self
, filename
, size
, hash, builder
=None, user
=None):
43 assert builder
or user
45 # Create a random ID for this upload
46 uuid
= users
.generate_random_string(64)
48 upload
= self
._get
_upload
("INSERT INTO uploads(uuid, filename, size, hash) \
49 VALUES(%s, %s, %s, %s) RETURNING *", uuid
, filename
, size
, hash)
52 upload
.builder
= builder
57 # Create space to where we save the data.
58 dirname
= os
.path
.dirname(upload
.path
)
59 if not os
.path
.exists(dirname
):
63 f
= open(upload
.path
, "w")
69 for upload
in self
.get_all():
73 class Upload(base
.DataObject
):
86 return self
.data
.filename
90 return os
.path
.join(UPLOADS_DIR
, self
.uuid
, self
.filename
)
98 return self
.data
.progress
/ self
.size
def get_builder(self):
	"""Resolve the Builder object referenced by this row's builder_id.

	Returns None when the builder_id column is unset/falsy.
	"""
	builder_id = self.data.builder_id
	if not builder_id:
		return None

	return self.backend.builders.get_by_id(builder_id)
def set_builder(self, builder):
	"""Associate this upload with a builder.

	Only the primary key is persisted; the full object is resolved
	again on read (see get_builder).
	"""
	builder_id = builder.id
	self._set_attribute("builder_id", builder_id)
# Read/write property wired to get_builder/set_builder.
# NOTE(review): lazy_property comes from .decorators (imported above);
# presumably it caches the getter's result — confirm there.
builder = lazy_property(get_builder, set_builder)
114 if self
.data
.user_id
:
115 return self
.backend
.users
.get_by_id(self
.data
.user_id
)
def set_user(self, user):
	"""Associate this upload with a user.

	Only the primary key is persisted; the full object is resolved
	again on read.
	"""
	user_id = user.id
	self._set_attribute("user_id", user_id)
# Read/write property wired to get_user/set_user.
# NOTE(review): lazy_property comes from .decorators (imported above);
# presumably it caches the getter's result — confirm there.
user = lazy_property(get_user, set_user)
122 def append(self
, data
):
123 # Check if the filesize was exceeded.
124 size
= os
.path
.getsize(self
.path
) + len(data
)
125 if size
> self
.data
.size
:
126 raise Exception, "Given filesize was exceeded for upload %s" % self
.uuid
128 logging
.debug("Writing %s bytes to %s" % (len(data
), self
.path
))
130 with
open(self
.path
, "ab") as f
:
133 self
._set
_attribute
("progress", size
)
136 size
= os
.path
.getsize(self
.path
)
137 if not size
== self
.data
.size
:
138 logging
.error("Filesize is not okay: %s" % (self
.uuid
))
141 # Calculate a hash to validate the upload.
142 hash = misc
.calc_hash1(self
.path
)
144 if not self
.hash == hash:
145 logging
.error("Hash did not match: %s != %s" % (self
.hash, hash))
152 Update the status of the upload in the database to "finished".
154 # Check if the file was completely uploaded and the hash is correct.
155 # If not, the upload has failed.
156 if not self
.validate():
159 self
._set
_attribute
("finished", True)
160 self
._set
_attribute
("time_finished", datetime
.datetime
.utcnow())
165 # Remove the uploaded data.
166 path
= os
.path
.dirname(self
.path
)
167 if os
.path
.exists(path
):
168 shutil
.rmtree(path
, ignore_errors
=True)
170 # Delete the upload from the database.
171 self
.db
.execute("DELETE FROM uploads WHERE id = %s", self
.id)
def time_started(self):
	"""Return the raw time_started value from this upload's database row.

	NOTE(review): a decorator line is not visible in this view — this is
	likely wrapped as a property by the surrounding code; confirm upstream.
	"""
	started = self.data.time_started
	return started
178 def time_running(self
):
179 # Get the seconds since we are running.
181 time_running
= datetime
.datetime
.utcnow() - self
.time_started
182 time_running
= time_running
.total_seconds()
190 if not self
.time_running
:
193 return self
.data
.progress
/ self
.time_running
196 # Remove uploads that are older than 2 hours.
197 if self
.time_running
>= 3600 * 2: