Commit 1563a769 authored by Oleg Borisenko

routes for backoffice; fix for CPython aio bug.

parent e05467d0
......@@ -11,6 +11,7 @@ PasteDeploy==2.1.1
plaster==1.0
plaster-pastedeploy==0.7
psycopg2-binary==2.8.6
psutil==5.8.0
Pygments==2.8.1
pyramid==2.0
pyramid-debugtoolbar==4.9
......
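The new psutil==5.8.0 pin supports the process check used by the copy_status view further down. A minimal standalone sketch of that check, assuming the daemon is launched as a script whose command line contains backup_daemon.py (the name matched in the view below):

import psutil

def backup_daemon_running():
    # Walk all processes and look for a Python interpreter whose command line mentions backup_daemon.py
    for proc in psutil.process_iter():
        try:
            if 'python' in proc.name() and any('backup_daemon.py' in arg for arg in proc.cmdline()):
                return True
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # The process may disappear or deny access while being inspected; skip it
            continue
    return False

if __name__ == '__main__':
    print(backup_daemon_running())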
......@@ -32,6 +32,9 @@ class BackupTarget(Base):
    last_scan_time = Column(DateTime)
    fullpath = Column(Text, primary_key=True)  # it's meant to be local mount fullpath
    def to_dict(self):
        return {c.name: str(getattr(self, c.name)) for c in self.__table__.columns}
    def scan(self, dbsession):
        now = time.time()
        if not self.enabled:
......
......@@ -39,6 +39,9 @@ class Batch(Base):
                            lazy='select')
    # files_to_backup = association_proxy("CopyQueue", "batch", creator=lambda bat: CopyQueue(batch=bat))
    def to_dict(self):
        return {c.name: str(getattr(self, c.name)) for c in self.__table__.columns}
    def change_status(self, status):
        self.status = status
        return
......
......@@ -44,6 +44,9 @@ class FileToBackup(Base):
    # batches = association_proxy("CopyQueue", "file_to_backup", creator=lambda file_to_back: CopyQueue(file_to_backup=file_to_back))
    relative_path = Column(Text, primary_key=True)  # filepath suffix after backup target fullpath
    def to_dict(self):
        return {c.name: str(getattr(self, c.name)) for c in self.__table__.columns}
    @staticmethod
    def add_files(file_list, dbsession):
        for i in file_list:
......
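The three models above gain the same to_dict() helper, which stringifies every mapped column so an instance can be returned directly from a JSON view. A self-contained sketch of the pattern; the Example model, its columns, and the SQLite engine are illustrative, only the helper body mirrors the diff:

from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Example(Base):
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)
    fullpath = Column(Text)

    def to_dict(self):
        # Same helper as in the diff: stringify every mapped column
        return {c.name: str(getattr(self, c.name)) for c in self.__table__.columns}

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Example(id=1, fullpath='/mnt/backup'))
session.commit()
print(session.query(Example).first().to_dict())   # {'id': '1', 'fullpath': '/mnt/backup'}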
......@@ -7,5 +7,9 @@ def includeme(config):
    config.add_route('identify_tape', '/identify_tape')
    config.add_route('use_tape_for_backup', '/use_tape_for_backup')
    config.add_route('use_tape_for_restore', '/use_tape_for_restore')
    config.add_route('list_backup_targets', '/list_backup_targets')
    config.add_route('test_behavior', '/test_behavior')
    config.add_route('copy_status', '/copy_status')
    config.add_route('batches', '/batches')
    config.add_route('batch_info', '/batch/{batch_id}')
    config.add_route('file_location', '/file_location')
\ No newline at end of file
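These route names are consumed by @view_config declarations in the views modules further down. A minimal single-file sketch of the wiring, not taken from this project (the inline view body, port, and startup code are placeholders):

from wsgiref.simple_server import make_server
from pyramid.config import Configurator

def copy_status(request):
    # Placeholder payload; the project's real view lives in the views module shown below
    return {'backup_is_running': False}

if __name__ == '__main__':
    with Configurator() as config:
        config.add_route('copy_status', '/copy_status')
        config.add_view(copy_status, route_name='copy_status',
                        renderer='json', request_method='GET')
        app = config.make_wsgi_app()
    make_server('0.0.0.0', 6543, app).serve_forever()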
......@@ -179,7 +179,6 @@ def main(argv=sys.argv):
    args = parse_args(argv)
    setup_logging(args.config_uri)
    env = bootstrap(args.config_uri)
    loop = asyncio.get_event_loop()
    try:
        while True:
            queue_len = 0
......@@ -196,6 +195,8 @@
            if not queue_len:
                continue
            while queue_len > 0:
                # instantiating here and not global due to CPython bug (https://bugs.python.org/issue40634)
                loop = asyncio.get_event_loop()
                if queue_len - job_size <= 0:
                    current_job = queue_len
                else:
......@@ -206,6 +207,7 @@
                iterations += current_job
                log.info("%d iterations per %d seconds (lazy load)", job_size, time.time() - now)
                now = time.time()
                loop.close()
            time.sleep(2)
    except SQLAlchemyError as e:
......@@ -214,8 +216,6 @@
    except Exception as e:
        log.error(e)
        raise
    finally:
        loop.close()
if __name__ == '__main__':
    main()
\ No newline at end of file
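The daemon change above drops the single global event loop and instead obtains a loop inside the work loop, closing it once the batch has run (the inline comment points at https://bugs.python.org/issue40634). A hedged, standalone sketch of that create-run-close pattern; do_copy() and the batch driver are stand-ins, not the project's job code, and the sketch uses asyncio.new_event_loop() for clarity where the daemon calls asyncio.get_event_loop():

import asyncio

async def do_copy(item):
    # Stand-in for the daemon's real asynchronous copy job
    await asyncio.sleep(0)
    return item

def run_batch(items):
    loop = asyncio.new_event_loop()   # fresh loop per batch instead of one global loop
    try:
        async def batch():
            return await asyncio.gather(*(do_copy(i) for i in items))
        return loop.run_until_complete(batch())
    finally:
        loop.close()                  # release the loop's resources once the batch is done

if __name__ == '__main__':
    print(run_batch(range(3)))        # [0, 1, 2]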
......@@ -22,5 +22,12 @@ def add_backup_target(request):
return Response(json_body={"error": e.detail}, content_type='application/json', status=e.status)
return
def list_backup_targets():
return
\ No newline at end of file
@view_config(route_name='list_backup_targets', renderer='json', request_method='GET')
def list_backup_targets(request):
try:
targets = [x.to_dict() for x in request.dbsession.query(models.backuptarget.BackupTarget).all()]
return targets
except SQLAlchemyError as e:
return Response(json_body={"error": e._message()}, content_type='application/json', status=500)
except HTTPException as e:
return Response(json_body={"error": e.detail}, content_type='application/json', status=e.status)
......
import psutil
from pyramid.view import view_config
from pyramid.response import Response
from pyramid.httpexceptions import HTTPException
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.sql import func
from sqlalchemy.types import BIGINT
from .. import models
# for current batch: copied vs remaining
def copy_status():
    return
@view_config(route_name='copy_status', renderer='json', request_method='GET')
def copy_status(request):
    try:
        backup_is_running = False
        for proc in psutil.process_iter():
            try:
                # Check if process name contains the given name string.
                if 'python' in proc.name():
                    if any('backup_daemon.py' in x for x in proc.cmdline()):
                        backup_is_running = True
            except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
                pass
        total = request.dbsession.query(func.sum(models.FileToBackup.fsize).cast(BIGINT)).scalar()
        copied = request.dbsession.query(func.sum(models.FileToBackup.fsize).cast(BIGINT)).filter(models.FileToBackup.tape_label != None).scalar()
    except SQLAlchemyError as e:
        return Response(json_body={"error": e._message()}, content_type='application/json', status=500)
    except HTTPException as e:
        return Response(json_body={"error": e.detail}, content_type='application/json', status=e.status)
    return {"backup_is_running": backup_is_running,
            "total": total,
            "total_gb": round(total / (1024**3), 2),
            "copied": copied,
            "copied_gb": round(copied / (1024**3), 2)}
# all batches list by time with status
def batches():
    return
@view_config(route_name='batches', renderer='json', request_method='GET')
def batches(request):
    try:
        batches = [x.to_dict() for x in request.dbsession.query(models.Batch).all()]
    except SQLAlchemyError as e:
        return Response(json_body={"error": e._message()}, content_type='application/json', status=500)
    except HTTPException as e:
        return Response(json_body={"error": e.detail}, content_type='application/json', status=e.status)
    return batches
@view_config(route_name='file_location', renderer='json', request_method='POST')
def file_location(request):
    file_list = request.json_body
    files = request.dbsession.query(models.FileToBackup).filter(models.FileToBackup.relative_path.in_(file_list)).all()
    return [f.to_dict() for f in files]
# batch info -- for batch id
def batch_info():
@view_config(route_name='batch_info', renderer='json', request_method='GET')
def batch_info(request):
    return
\ No newline at end of file
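batch_info is left as a stub in this commit. Purely as an illustration, and reusing the imports and model names already present in this file, one possible implementation could read the {batch_id} placeholder from request.matchdict and look the batch up; the assumption that Batch has an id primary key is not from the diff:

@view_config(route_name='batch_info', renderer='json', request_method='GET')
def batch_info(request):
    batch_id = request.matchdict['batch_id']                       # captured from /batch/{batch_id}
    batch = request.dbsession.query(models.Batch).get(batch_id)    # assumes an 'id' primary key (hypothetical)
    if batch is None:
        return Response(json_body={"error": "batch not found"},
                        content_type='application/json', status=404)
    return batch.to_dict()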